| Name | ACCESS-CI Login ID |
| -------- | ------- |
| Aditya Balu | baditya |
| Yash Puri | ypuri |
| Nathan Van Utrecht | nvan21 |
| Elijah Rodriguez | eli320 |
| Ezra Odole | ezrao |
| Rwit Chakravorty | rwit12 |
| Justin Rodriguez | jjrodz96 |
| Alan Snow | snowman2 |
| Fatemeh Delzendehrooy | fdzr |
| Rim Nassiri | rnassiri |
| Micah Wallace | miwal15 |
| Pallavi Baliyara | pallavi423 |
| Chase Franklin | chasef1 |
| Fergal Hennessy | cooordinateBash |
| Mehdi Saraeian | mehdis |
| Fadahunsi Adeife | adeife |
| Daniella Benitez | benitez123 |
| Bitgoeul Kim | bgekim |
| Brennan Holm | bholm |
| Jia Ling Pik | jpik |
| Jeffrey Wetter | jwetter |
| Anneka Singh | anneka |
| Bach Nguyen | ntbach |
| Ashton Corpuz | acorpuz |
| Aaron Bertram | abertram |
| Badrinath Balasubramaniam | bb2 |
| Lucas Gutknecht | lwg001 |
| Arti Singh | arti |
| Aneesh Shrotriya | ashro |
| William Griner | willyg |
| Ethan Herron | edherron |
[Lightning Colab Notebook](https://colab.research.google.com/drive/1rwlKbekjLRtIZWYGWi9lUbWRDmcAokUn#scrollTo=EMm3shLGC64l)
JS2 Cloud 1: https://149-165-175-132.js2proxy.cacao.run/
JS2 Cloud 2: https://149-165-168-236.js2proxy.cacao.run/
```python
import torch.nn as nn
import torch.nn.functional as F


# Define a model.
class ConvNet(nn.Module):
    # This defines the structure of the CNN.
    def __init__(self):
        super(ConvNet, self).__init__()
        image_dim_size = 28
        conv1_kernel_size = 5
        conv1_out_channels = 10
        conv2_kernel_size = 5
        conv2_out_channels = 20
        self.max_pool1_kernel_size = 2
        self.max_pool2_kernel_size = 2
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=conv1_out_channels, kernel_size=conv1_kernel_size)
        self.conv2 = nn.Conv2d(in_channels=conv1_out_channels, out_channels=conv2_out_channels, kernel_size=conv2_kernel_size)
        self.conv2_drop = nn.Dropout2d()  # Dropout
        conv1_dim_size = image_dim_size - conv1_kernel_size + 1
        max_pool1_dim_size = int(conv1_dim_size / self.max_pool1_kernel_size)
        conv2_dim_size = max_pool1_dim_size - conv2_kernel_size + 1
        max_pool2_dim_size = int(conv2_dim_size / self.max_pool2_kernel_size)
        fc1_in_features = conv2_out_channels * (max_pool2_dim_size ** 2)
        self.fc1 = nn.Linear(in_features=fc1_in_features, out_features=50)
        self.fc2 = nn.Linear(in_features=50, out_features=10)

    def forward(self, x):
        # Convolutional Layer/Pooling Layer/Activation
        x = self.conv1(x)
        x = F.relu(F.max_pool2d(x, kernel_size=self.max_pool1_kernel_size))
        # Convolutional Layer/Dropout/Pooling Layer/Activation
        x = self.conv2(x)
        x = F.relu(F.max_pool2d(self.conv2_drop(x), kernel_size=self.max_pool2_kernel_size))
        # Flatten to match fc1's input size (20 channels * 4 * 4 = 320 features).
        x = x.view(-1, self.fc1.in_features)
        # Fully Connected Layer/Activation
        x = F.relu(self.fc1(x))
        x = F.dropout(x, training=self.training)
        # Fully Connected Layer
        x = self.fc2(x)
        # Log-softmax gives log-probabilities over the 10 classes.
        return F.log_softmax(x, dim=1)
```
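As a quick sanity check (not part of the original snippet), the model can be instantiated and run on a dummy MNIST-shaped batch to confirm the output dimensions:
```python
import torch

# Dummy batch of 8 single-channel 28x28 images (the input size the model assumes).
model = ConvNet()
dummy = torch.randn(8, 1, 28, 28)
log_probs = model(dummy)
print(log_probs.shape)  # torch.Size([8, 10]): log-probabilities over 10 classes
```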
# Computer Vision Dataset
[https://iastate.box.com/s/0d1zo64mw6edzw4bqhiwycx1zx35ejq2](https://iastate.box.com/s/0d1zo64mw6edzw4bqhiwycx1zx35ejq2)
```csvpreview
file,driveway
ATT10000_PropertyConditionAssessment_image-20220812-162947.jpg,1
ATT10001_PropertyConditionAssessment_image-20220812-162733.jpg,1
ATT10002_PropertyConditionAssessment_image-20220812-163221.jpg,1
ATT10003_PropertyConditionAssessment_image-20220812-163214.jpg,1
ATT10004_PropertyConditionAssessment_image-20220812-163208.jpg,1
ATT10005_PropertyConditionAssessment_image-20220812-162602.jpg,1
ATT10006_PropertyConditionAssessment_image-20220812-163253.jpg,0
ATT10007_PropertyConditionAssessment_image-20220812-163258.jpg,0
ATT10008_PropertyConditionAssessment_image-20220812-162906.jpg,0
ATT10009_PropertyConditionAssessment_image-20220812-163322.jpg,1
ATT1000_PropertyConditionAssessment_image-20220623-195513.jpg,0
ATT10010_PropertyConditionAssessment_image-20220812-163222.jpg,1
ATT10011_PropertyConditionAssessment_image-20220812-163421.jpg,0
ATT10012_PropertyConditionAssessment_image-20220812-153545.jpg,0
ATT10013_PropertyConditionAssessment_image-20220812-153140.jpg,1
ATT10014_PropertyConditionAssessment_image-20220812-153708.jpg,1
ATT10015_PropertyConditionAssessment_image-20220812-153726.jpg,1
ATT10016_PropertyConditionAssessment_image-20220812-153709.jpg,0
ATT10017_PropertyConditionAssessment_image-20220812-153801.jpg,1
ATT10018_PropertyConditionAssessment_image-20220812-153919.jpg,1
ATT10019_PropertyConditionAssessment_image-20220812-153551.jpg,1
ATT1001_PropertyConditionAssessment_image-20220623-195632.jpg,0
ATT10020_PropertyConditionAssessment_image-20220812-153944.jpg,1
ATT10021_PropertyConditionAssessment_image-20220812-153924.jpg,1
ATT10022_PropertyConditionAssessment_image-20220812-154205.jpg,0
ATT10023_PropertyConditionAssessment_image-20220812-150507.jpg,1
ATT10024_PropertyConditionAssessment_image-20220812-154224.jpg,1
ATT10025_PropertyConditionAssessment_image-20220812-154152.jpg,0
ATT10026_PropertyConditionAssessment_image-20220812-154434.jpg,1
ATT10027_PropertyConditionAssessment_image-20220812-154016.jpg,1
ATT10028_PropertyConditionAssessment_image-20220812-154502.jpg,0
ATT10029_PropertyConditionAssessment_image-20220812-154526.jpg,1
ATT1002_PropertyConditionAssessment_image-20220623-195747.jpg,0
ATT10030_PropertyConditionAssessment_image-20220812-154356.jpg,1
ATT10031_PropertyConditionAssessment_image-20220812-154525.jpg,1
ATT10032_PropertyConditionAssessment_image-20220812-154657.jpg,0
ATT10033_PropertyConditionAssessment_image-20220812-154325.jpg,1
ATT10034_PropertyConditionAssessment_image-20220812-154818.jpg,0
ATT10035_PropertyConditionAssessment_image-20220812-154827.jpg,1
ATT10036_PropertyConditionAssessment_image-20220812-154739.jpg,1
ATT10037_PropertyConditionAssessment_image-20220812-154934.jpg,1
ATT10038_PropertyConditionAssessment_image-20220812-155009.jpg,1
ATT10039_PropertyConditionAssessment_image-20220812-154533.jpg,0
ATT1003_PropertyConditionAssessment_image-20220623-200003.jpg,1
ATT10040_PropertyConditionAssessment_image-20220812-155141.jpg,1
ATT10041_PropertyConditionAssessment_image-20220812-155038.jpg,1
ATT10042_PropertyConditionAssessment_image-20220812-155404.jpg,0
ATT10043_PropertyConditionAssessment_image-20220812-155104.jpg,1
ATT10044_PropertyConditionAssessment_image-20220812-155454.jpg,1
ATT10045_PropertyConditionAssessment_image-20220812-155408.jpg,1
ATT10046_PropertyConditionAssessment_image-20220812-155459.jpg,1
ATT10047_PropertyConditionAssessment_image-20220812-155621.jpg,1
ATT10048_PropertyConditionAssessment_image-20220812-184052.jpg,1
ATT10049_PropertyConditionAssessment_image-20220812-184119.jpg,1
ATT1004_PropertyConditionAssessment_image-20220623-195828.jpg,0
ATT10050_PropertyConditionAssessment_image-20220812-184150.jpg,1
ATT10051_PropertyConditionAssessment_image-20220812-184156.jpg,1
ATT10052_PropertyConditionAssessment_image-20220812-184201.jpg,0
ATT10053_PropertyConditionAssessment_image-20220812-165622.jpg,0
ATT10054_PropertyConditionAssessment_image-20220812-182816.jpg,0
ATT10055_PropertyConditionAssessment_image-20220812-163512.jpg,1
ATT10056_PropertyConditionAssessment_image-20220812-163605.jpg,1
ATT10057_PropertyConditionAssessment_image-20220812-163506.jpg,1
ATT10058_PropertyConditionAssessment_image-20220812-163416.jpg,1
ATT10059_PropertyConditionAssessment_image-20220812-163701.jpg,0
ATT1005_PropertyConditionAssessment_image-20220623-195920.jpg,1
ATT10060_PropertyConditionAssessment_image-20220812-163554.jpg,1
ATT10061_PropertyConditionAssessment_image-20220812-163726.jpg,0
ATT10062_PropertyConditionAssessment_image-20220812-163504.jpg,0
ATT10063_PropertyConditionAssessment_image-20220812-163806.jpg,0
ATT10064_PropertyConditionAssessment_image-20220812-163834.jpg,1
ATT10065_PropertyConditionAssessment_image-20220812-163805.jpg,0
ATT10066_PropertyConditionAssessment_image-20220812-163351.jpg,0
ATT10067_PropertyConditionAssessment_image-20220812-163909.jpg,0
ATT10068_PropertyConditionAssessment_image-20220812-163926.jpg,1
ATT10069_PropertyConditionAssessment_image-20220812-163814.jpg,0
ATT1006_PropertyConditionAssessment_image-20220623-200048.jpg,0
ATT10070_PropertyConditionAssessment_image-20220812-163859.jpg,1
ATT10071_PropertyConditionAssessment_image-20220812-161852.jpg,0
ATT10072_PropertyConditionAssessment_image-20220812-163651.jpg,0
ATT10073_PropertyConditionAssessment_image-20220812-164116.jpg,1
ATT10074_PropertyConditionAssessment_image-20220812-164144.jpg,0
ATT10075_PropertyConditionAssessment_image-20220812-164531.jpg,1
ATT10076_PropertyConditionAssessment_image-20220812-164544.jpg,1
ATT10077_PropertyConditionAssessment_image-20220812-164709.jpg,0
ATT10078_PropertyConditionAssessment_image-20220812-164715.jpg,1
ATT10079_PropertyConditionAssessment_image-20220812-164338.jpg,1
ATT1007_PropertyConditionAssessment_image-20220623-200124.jpg,1
ATT10080_PropertyConditionAssessment_image-20220812-165214.jpg,1
ATT10081_PropertyConditionAssessment_image-20220812-164800.jpg,1
ATT10082_PropertyConditionAssessment_image-20220812-165400.jpg,1
ATT10083_PropertyConditionAssessment_image-20220812-165222.jpg,1
ATT10084_PropertyConditionAssessment_image-20220812-164949.jpg,0
ATT10085_PropertyConditionAssessment_image-20220812-165415.jpg,1
ATT10086_PropertyConditionAssessment_image-20220812-165246.jpg,0
ATT10087_PropertyConditionAssessment_image-20220812-165446.jpg,0
ATT10088_PropertyConditionAssessment_image-20220812-165558.jpg,1
ATT10089_PropertyConditionAssessment_image-20220812-165259.jpg,1
ATT1008_PropertyConditionAssessment_image-20220623-195639.jpg,1
ATT10090_PropertyConditionAssessment_image-20220812-165612.jpg,0
```
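Before training, it can help to check how many images are labeled and how balanced the `driveway` label is. A minimal sketch, assuming the label CSV above has been saved as `file_labels.csv` (the filename used in the DataSet example below):
```python
import pandas as pd

# One row per image: filename plus a 0/1 driveway label.
labels_df = pd.read_csv("file_labels.csv")
print(len(labels_df), "labeled images")
print(labels_df["driveway"].value_counts(normalize=True))  # fraction of each class
```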
# Example DataSet class for driveway classification
```python
import os

import torch
from torch.utils.data import Dataset
from torchvision import transforms
from PIL import Image
import pandas as pd


class ImageLabelDataset(Dataset):
    def __init__(self, csv_file, root_dir, transform=None):
        """
        Args:
            csv_file (string): Path to the csv file with annotations.
            root_dir (string): Directory with all the images.
            transform (callable, optional): Optional transform to be applied on a sample.
        """
        self.labels_df = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.transform = transform

    def __len__(self):
        return len(self.labels_df)

    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        img_name = os.path.join(self.root_dir, self.labels_df.iloc[idx, 0])
        image = Image.open(img_name).convert('RGB')
        label = torch.tensor(self.labels_df.iloc[idx, 1], dtype=torch.float32)
        if self.transform:
            image = self.transform(image)
        return image, label


# Example usage
transform = transforms.Compose([
    transforms.Resize((256, 256)),
    transforms.ToTensor()
])
dataset = ImageLabelDataset(csv_file='file_labels.csv', root_dir='data', transform=transform)
```
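To feed this into a training loop, wrap the dataset in DataLoaders. A minimal sketch assuming the `dataset` object from the example above; the 80/20 split, batch size, and seed are placeholder choices:
```python
import torch
from torch.utils.data import DataLoader, random_split

# Illustrative 80/20 train/validation split.
n_val = int(0.2 * len(dataset))
n_train = len(dataset) - n_val
train_set, val_set = random_split(
    dataset, [n_train, n_val], generator=torch.Generator().manual_seed(42)
)

train_loader = DataLoader(train_set, batch_size=32, shuffle=True, num_workers=2)
val_loader = DataLoader(val_set, batch_size=32, shuffle=False, num_workers=2)

images, labels = next(iter(train_loader))
print(images.shape, labels.shape)  # e.g. torch.Size([32, 3, 256, 256]) torch.Size([32])
```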
Example PL-Wandb code: https://colab.research.google.com/github/wandb/examples/blob/master/colabs/pytorch-lightning/Image_Classification_using_PyTorch_Lightning.ipynb#scrollTo=uMQ8UAw5HY4J
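In the spirit of that PL-Wandb example, here is a hypothetical minimal PyTorch Lightning module for the binary driveway label; the ResNet-18 backbone, learning rate, and project name are assumptions, not taken from the linked notebook:
```python
import torch
import torch.nn as nn
import pytorch_lightning as pl
from pytorch_lightning.loggers import WandbLogger
from torchvision import models


class DrivewayClassifier(pl.LightningModule):
    def __init__(self, lr=1e-3):
        super().__init__()
        self.save_hyperparameters()
        # ResNet-18 backbone with a single-logit head for binary classification (assumed choice).
        self.backbone = models.resnet18(weights=None)
        self.backbone.fc = nn.Linear(self.backbone.fc.in_features, 1)
        self.loss_fn = nn.BCEWithLogitsLoss()

    def forward(self, x):
        return self.backbone(x).squeeze(1)

    def training_step(self, batch, batch_idx):
        images, labels = batch
        loss = self.loss_fn(self(images), labels)
        self.log("train_loss", loss)
        return loss

    def validation_step(self, batch, batch_idx):
        images, labels = batch
        logits = self(images)
        loss = self.loss_fn(logits, labels)
        acc = ((torch.sigmoid(logits) > 0.5).float() == labels).float().mean()
        self.log("val_loss", loss)
        self.log("val_acc", acc)

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=self.hparams.lr)


# Training with Weights & Biases logging; assumes the loaders defined above and a wandb login.
# trainer = pl.Trainer(max_epochs=5, logger=WandbLogger(project="driveway-classification"))
# trainer.fit(DrivewayClassifier(), train_loader, val_loader)
```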
Torchvision Documentation:
Colab notebook for driveway image classification: https://colab.research.google.com/drive/1gwDuhELzjPJ43fJUaLzPJOTy_b2dqw7H?usp=sharing
CV notebooks: https://github.com/TranslationalAICenterISU/cv-ssl-aug-2023/tree/main/notebooks
CLIP notebook: https://colab.research.google.com/drive/1xVAd4Ff3kvHrdYfirX2JvSSc2bJd7XB5?usp=sharing
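For a flavor of what the CLIP notebook covers, a minimal zero-shot classification sketch using the Hugging Face `transformers` CLIP wrapper (the prompts and image path are placeholders, and the linked notebook may use the original openai/CLIP package instead):
```python
from PIL import Image
from transformers import CLIPModel, CLIPProcessor

model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")

# Zero-shot classification: score an image against candidate text prompts.
image = Image.open("data/example.jpg").convert("RGB")  # placeholder path
prompts = ["a photo of a house with a driveway", "a photo of a house without a driveway"]

inputs = processor(text=prompts, images=image, return_tensors="pt", padding=True)
outputs = model(**inputs)
probs = outputs.logits_per_image.softmax(dim=-1)
print(dict(zip(prompts, probs[0].tolist())))
```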
Survey: https://forms.gle/njfEkCZah7JKX5sz6
NLP Notebooks: https://iastate.box.com/s/ryw5bgo9wk86mik7mlxjjtpxlz73cd2k