added cifar/cars manifests, added model templates
This commit is contained in:
parent
1d505f644d
commit
cb18ffa937
16185
cars/allimages.txt
Executable file
16185
cars/allimages.txt
Executable file
File diff suppressed because it is too large
Load Diff
163
cars/default-split/test.txt
Normal file
163
cars/default-split/test.txt
Normal file
@ -0,0 +1,163 @@
|
||||
/scratch/Teaching/cars/car_ims/007753.jpg 94
|
||||
/scratch/Teaching/cars/car_ims/001926.jpg 23
|
||||
/scratch/Teaching/cars/car_ims/002409.jpg 29
|
||||
/scratch/Teaching/cars/car_ims/000646.jpg 7
|
||||
/scratch/Teaching/cars/car_ims/011943.jpg 145
|
||||
/scratch/Teaching/cars/car_ims/013830.jpg 167
|
||||
/scratch/Teaching/cars/car_ims/000126.jpg 1
|
||||
/scratch/Teaching/cars/car_ims/013149.jpg 160
|
||||
/scratch/Teaching/cars/car_ims/011360.jpg 137
|
||||
/scratch/Teaching/cars/car_ims/012185.jpg 147
|
||||
/scratch/Teaching/cars/car_ims/011741.jpg 142
|
||||
/scratch/Teaching/cars/car_ims/013570.jpg 164
|
||||
/scratch/Teaching/cars/car_ims/013540.jpg 164
|
||||
/scratch/Teaching/cars/car_ims/015948.jpg 193
|
||||
/scratch/Teaching/cars/car_ims/002641.jpg 32
|
||||
/scratch/Teaching/cars/car_ims/003179.jpg 38
|
||||
/scratch/Teaching/cars/car_ims/002426.jpg 29
|
||||
/scratch/Teaching/cars/car_ims/011400.jpg 138
|
||||
/scratch/Teaching/cars/car_ims/008233.jpg 100
|
||||
/scratch/Teaching/cars/car_ims/009852.jpg 119
|
||||
/scratch/Teaching/cars/car_ims/003280.jpg 40
|
||||
/scratch/Teaching/cars/car_ims/011885.jpg 144
|
||||
/scratch/Teaching/cars/car_ims/008443.jpg 103
|
||||
/scratch/Teaching/cars/car_ims/004406.jpg 54
|
||||
/scratch/Teaching/cars/car_ims/001308.jpg 16
|
||||
/scratch/Teaching/cars/car_ims/012276.jpg 148
|
||||
/scratch/Teaching/cars/car_ims/011234.jpg 136
|
||||
/scratch/Teaching/cars/car_ims/016124.jpg 195
|
||||
/scratch/Teaching/cars/car_ims/008825.jpg 107
|
||||
/scratch/Teaching/cars/car_ims/010232.jpg 123
|
||||
/scratch/Teaching/cars/car_ims/003561.jpg 43
|
||||
/scratch/Teaching/cars/car_ims/013542.jpg 164
|
||||
/scratch/Teaching/cars/car_ims/009953.jpg 121
|
||||
/scratch/Teaching/cars/car_ims/003438.jpg 42
|
||||
/scratch/Teaching/cars/car_ims/000941.jpg 11
|
||||
/scratch/Teaching/cars/car_ims/002437.jpg 29
|
||||
/scratch/Teaching/cars/car_ims/001917.jpg 23
|
||||
/scratch/Teaching/cars/car_ims/008014.jpg 97
|
||||
/scratch/Teaching/cars/car_ims/010567.jpg 127
|
||||
/scratch/Teaching/cars/car_ims/007508.jpg 91
|
||||
/scratch/Teaching/cars/car_ims/014634.jpg 177
|
||||
/scratch/Teaching/cars/car_ims/008810.jpg 107
|
||||
/scratch/Teaching/cars/car_ims/011576.jpg 140
|
||||
/scratch/Teaching/cars/car_ims/000834.jpg 10
|
||||
/scratch/Teaching/cars/car_ims/014925.jpg 181
|
||||
/scratch/Teaching/cars/car_ims/008870.jpg 108
|
||||
/scratch/Teaching/cars/car_ims/004391.jpg 54
|
||||
/scratch/Teaching/cars/car_ims/007491.jpg 91
|
||||
/scratch/Teaching/cars/car_ims/000901.jpg 11
|
||||
/scratch/Teaching/cars/car_ims/004833.jpg 59
|
||||
/scratch/Teaching/cars/car_ims/005880.jpg 72
|
||||
/scratch/Teaching/cars/car_ims/004545.jpg 55
|
||||
/scratch/Teaching/cars/car_ims/001110.jpg 13
|
||||
/scratch/Teaching/cars/car_ims/013531.jpg 164
|
||||
/scratch/Teaching/cars/car_ims/000651.jpg 7
|
||||
/scratch/Teaching/cars/car_ims/014539.jpg 176
|
||||
/scratch/Teaching/cars/car_ims/005915.jpg 72
|
||||
/scratch/Teaching/cars/car_ims/011270.jpg 136
|
||||
/scratch/Teaching/cars/car_ims/005412.jpg 66
|
||||
/scratch/Teaching/cars/car_ims/000420.jpg 5
|
||||
/scratch/Teaching/cars/car_ims/007416.jpg 90
|
||||
/scratch/Teaching/cars/car_ims/016118.jpg 195
|
||||
/scratch/Teaching/cars/car_ims/010336.jpg 124
|
||||
/scratch/Teaching/cars/car_ims/014895.jpg 180
|
||||
/scratch/Teaching/cars/car_ims/013320.jpg 161
|
||||
/scratch/Teaching/cars/car_ims/006408.jpg 78
|
||||
/scratch/Teaching/cars/car_ims/004836.jpg 59
|
||||
/scratch/Teaching/cars/car_ims/008226.jpg 100
|
||||
/scratch/Teaching/cars/car_ims/001449.jpg 17
|
||||
/scratch/Teaching/cars/car_ims/002582.jpg 31
|
||||
/scratch/Teaching/cars/car_ims/009383.jpg 114
|
||||
/scratch/Teaching/cars/car_ims/013361.jpg 162
|
||||
/scratch/Teaching/cars/car_ims/008887.jpg 108
|
||||
/scratch/Teaching/cars/car_ims/015562.jpg 188
|
||||
/scratch/Teaching/cars/car_ims/010545.jpg 127
|
||||
/scratch/Teaching/cars/car_ims/013231.jpg 160
|
||||
/scratch/Teaching/cars/car_ims/007676.jpg 93
|
||||
/scratch/Teaching/cars/car_ims/013180.jpg 160
|
||||
/scratch/Teaching/cars/car_ims/011634.jpg 141
|
||||
/scratch/Teaching/cars/car_ims/007651.jpg 93
|
||||
/scratch/Teaching/cars/car_ims/015163.jpg 183
|
||||
/scratch/Teaching/cars/car_ims/013526.jpg 164
|
||||
/scratch/Teaching/cars/car_ims/005603.jpg 69
|
||||
/scratch/Teaching/cars/car_ims/007724.jpg 94
|
||||
/scratch/Teaching/cars/car_ims/003868.jpg 47
|
||||
/scratch/Teaching/cars/car_ims/004508.jpg 55
|
||||
/scratch/Teaching/cars/car_ims/007257.jpg 88
|
||||
/scratch/Teaching/cars/car_ims/007871.jpg 95
|
||||
/scratch/Teaching/cars/car_ims/003218.jpg 39
|
||||
/scratch/Teaching/cars/car_ims/013680.jpg 166
|
||||
/scratch/Teaching/cars/car_ims/001849.jpg 22
|
||||
/scratch/Teaching/cars/car_ims/000765.jpg 9
|
||||
/scratch/Teaching/cars/car_ims/000326.jpg 4
|
||||
/scratch/Teaching/cars/car_ims/002919.jpg 35
|
||||
/scratch/Teaching/cars/car_ims/010974.jpg 132
|
||||
/scratch/Teaching/cars/car_ims/014412.jpg 174
|
||||
/scratch/Teaching/cars/car_ims/014086.jpg 170
|
||||
/scratch/Teaching/cars/car_ims/013943.jpg 169
|
||||
/scratch/Teaching/cars/car_ims/015375.jpg 186
|
||||
/scratch/Teaching/cars/car_ims/013271.jpg 161
|
||||
/scratch/Teaching/cars/car_ims/014190.jpg 172
|
||||
/scratch/Teaching/cars/car_ims/009930.jpg 120
|
||||
/scratch/Teaching/cars/car_ims/000274.jpg 3
|
||||
/scratch/Teaching/cars/car_ims/004151.jpg 51
|
||||
/scratch/Teaching/cars/car_ims/006930.jpg 84
|
||||
/scratch/Teaching/cars/car_ims/015063.jpg 182
|
||||
/scratch/Teaching/cars/car_ims/004477.jpg 55
|
||||
/scratch/Teaching/cars/car_ims/013667.jpg 166
|
||||
/scratch/Teaching/cars/car_ims/007232.jpg 88
|
||||
/scratch/Teaching/cars/car_ims/014273.jpg 172
|
||||
/scratch/Teaching/cars/car_ims/013920.jpg 169
|
||||
/scratch/Teaching/cars/car_ims/003833.jpg 47
|
||||
/scratch/Teaching/cars/car_ims/007669.jpg 93
|
||||
/scratch/Teaching/cars/car_ims/002650.jpg 32
|
||||
/scratch/Teaching/cars/car_ims/014180.jpg 171
|
||||
/scratch/Teaching/cars/car_ims/015420.jpg 187
|
||||
/scratch/Teaching/cars/car_ims/006879.jpg 84
|
||||
/scratch/Teaching/cars/car_ims/001916.jpg 23
|
||||
/scratch/Teaching/cars/car_ims/009356.jpg 113
|
||||
/scratch/Teaching/cars/car_ims/000782.jpg 9
|
||||
/scratch/Teaching/cars/car_ims/007928.jpg 96
|
||||
/scratch/Teaching/cars/car_ims/005830.jpg 71
|
||||
/scratch/Teaching/cars/car_ims/007756.jpg 94
|
||||
/scratch/Teaching/cars/car_ims/009398.jpg 114
|
||||
/scratch/Teaching/cars/car_ims/001939.jpg 23
|
||||
/scratch/Teaching/cars/car_ims/011592.jpg 140
|
||||
/scratch/Teaching/cars/car_ims/001945.jpg 23
|
||||
/scratch/Teaching/cars/car_ims/009433.jpg 114
|
||||
/scratch/Teaching/cars/car_ims/003113.jpg 38
|
||||
/scratch/Teaching/cars/car_ims/011000.jpg 132
|
||||
/scratch/Teaching/cars/car_ims/009984.jpg 121
|
||||
/scratch/Teaching/cars/car_ims/009275.jpg 112
|
||||
/scratch/Teaching/cars/car_ims/011702.jpg 142
|
||||
/scratch/Teaching/cars/car_ims/002462.jpg 30
|
||||
/scratch/Teaching/cars/car_ims/007304.jpg 89
|
||||
/scratch/Teaching/cars/car_ims/009784.jpg 119
|
||||
/scratch/Teaching/cars/car_ims/001973.jpg 23
|
||||
/scratch/Teaching/cars/car_ims/014398.jpg 174
|
||||
/scratch/Teaching/cars/car_ims/005882.jpg 72
|
||||
/scratch/Teaching/cars/car_ims/010154.jpg 122
|
||||
/scratch/Teaching/cars/car_ims/009819.jpg 119
|
||||
/scratch/Teaching/cars/car_ims/014327.jpg 173
|
||||
/scratch/Teaching/cars/car_ims/003191.jpg 39
|
||||
/scratch/Teaching/cars/car_ims/000801.jpg 9
|
||||
/scratch/Teaching/cars/car_ims/003234.jpg 39
|
||||
/scratch/Teaching/cars/car_ims/011294.jpg 136
|
||||
/scratch/Teaching/cars/car_ims/010463.jpg 126
|
||||
/scratch/Teaching/cars/car_ims/004999.jpg 61
|
||||
/scratch/Teaching/cars/car_ims/007164.jpg 87
|
||||
/scratch/Teaching/cars/car_ims/006816.jpg 83
|
||||
/scratch/Teaching/cars/car_ims/009883.jpg 120
|
||||
/scratch/Teaching/cars/car_ims/007241.jpg 88
|
||||
/scratch/Teaching/cars/car_ims/012799.jpg 155
|
||||
/scratch/Teaching/cars/car_ims/001293.jpg 15
|
||||
/scratch/Teaching/cars/car_ims/011263.jpg 136
|
||||
/scratch/Teaching/cars/car_ims/016136.jpg 195
|
||||
/scratch/Teaching/cars/car_ims/005974.jpg 73
|
||||
/scratch/Teaching/cars/car_ims/011320.jpg 137
|
||||
/scratch/Teaching/cars/car_ims/005991.jpg 73
|
||||
/scratch/Teaching/cars/car_ims/002331.jpg 28
|
||||
/scratch/Teaching/cars/car_ims/006916.jpg 84
|
||||
/scratch/Teaching/cars/car_ims/014568.jpg 176
|
||||
/scratch/Teaching/cars/car_ims/004747.jpg 58
|
14566
cars/default-split/train.txt
Executable file
14566
cars/default-split/train.txt
Executable file
File diff suppressed because it is too large
Load Diff
1456
cars/default-split/val.txt
Executable file
1456
cars/default-split/val.txt
Executable file
File diff suppressed because it is too large
Load Diff
196
cars/labels.txt
Executable file
196
cars/labels.txt
Executable file
@ -0,0 +1,196 @@
|
||||
AM General Hummer SUV 2000
|
||||
Acura RL Sedan 2012
|
||||
Acura TL Sedan 2012
|
||||
Acura TL Type-S 2008
|
||||
Acura TSX Sedan 2012
|
||||
Acura Integra Type R 2001
|
||||
Acura ZDX Hatchback 2012
|
||||
Aston Martin V8 Vantage Convertible 2012
|
||||
Aston Martin V8 Vantage Coupe 2012
|
||||
Aston Martin Virage Convertible 2012
|
||||
Aston Martin Virage Coupe 2012
|
||||
Audi RS 4 Convertible 2008
|
||||
Audi A5 Coupe 2012
|
||||
Audi TTS Coupe 2012
|
||||
Audi R8 Coupe 2012
|
||||
Audi V8 Sedan 1994
|
||||
Audi 100 Sedan 1994
|
||||
Audi 100 Wagon 1994
|
||||
Audi TT Hatchback 2011
|
||||
Audi S6 Sedan 2011
|
||||
Audi S5 Convertible 2012
|
||||
Audi S5 Coupe 2012
|
||||
Audi S4 Sedan 2012
|
||||
Audi S4 Sedan 2007
|
||||
Audi TT RS Coupe 2012
|
||||
BMW ActiveHybrid 5 Sedan 2012
|
||||
BMW 1 Series Convertible 2012
|
||||
BMW 1 Series Coupe 2012
|
||||
BMW 3 Series Sedan 2012
|
||||
BMW 3 Series Wagon 2012
|
||||
BMW 6 Series Convertible 2007
|
||||
BMW X5 SUV 2007
|
||||
BMW X6 SUV 2012
|
||||
BMW M3 Coupe 2012
|
||||
BMW M5 Sedan 2010
|
||||
BMW M6 Convertible 2010
|
||||
BMW X3 SUV 2012
|
||||
BMW Z4 Convertible 2012
|
||||
Bentley Continental Supersports Conv. Convertible 2012
|
||||
Bentley Arnage Sedan 2009
|
||||
Bentley Mulsanne Sedan 2011
|
||||
Bentley Continental GT Coupe 2012
|
||||
Bentley Continental GT Coupe 2007
|
||||
Bentley Continental Flying Spur Sedan 2007
|
||||
Bugatti Veyron 16.4 Convertible 2009
|
||||
Bugatti Veyron 16.4 Coupe 2009
|
||||
Buick Regal GS 2012
|
||||
Buick Rainier SUV 2007
|
||||
Buick Verano Sedan 2012
|
||||
Buick Enclave SUV 2012
|
||||
Cadillac CTS-V Sedan 2012
|
||||
Cadillac SRX SUV 2012
|
||||
Cadillac Escalade EXT Crew Cab 2007
|
||||
Chevrolet Silverado 1500 Hybrid Crew Cab 2012
|
||||
Chevrolet Corvette Convertible 2012
|
||||
Chevrolet Corvette ZR1 2012
|
||||
Chevrolet Corvette Ron Fellows Edition Z06 2007
|
||||
Chevrolet Traverse SUV 2012
|
||||
Chevrolet Camaro Convertible 2012
|
||||
Chevrolet HHR SS 2010
|
||||
Chevrolet Impala Sedan 2007
|
||||
Chevrolet Tahoe Hybrid SUV 2012
|
||||
Chevrolet Sonic Sedan 2012
|
||||
Chevrolet Express Cargo Van 2007
|
||||
Chevrolet Avalanche Crew Cab 2012
|
||||
Chevrolet Cobalt SS 2010
|
||||
Chevrolet Malibu Hybrid Sedan 2010
|
||||
Chevrolet TrailBlazer SS 2009
|
||||
Chevrolet Silverado 2500HD Regular Cab 2012
|
||||
Chevrolet Silverado 1500 Classic Extended Cab 2007
|
||||
Chevrolet Express Van 2007
|
||||
Chevrolet Monte Carlo Coupe 2007
|
||||
Chevrolet Malibu Sedan 2007
|
||||
Chevrolet Silverado 1500 Extended Cab 2012
|
||||
Chevrolet Silverado 1500 Regular Cab 2012
|
||||
Chrysler Aspen SUV 2009
|
||||
Chrysler Sebring Convertible 2010
|
||||
Chrysler Town and Country Minivan 2012
|
||||
Chrysler 300 SRT-8 2010
|
||||
Chrysler Crossfire Convertible 2008
|
||||
Chrysler PT Cruiser Convertible 2008
|
||||
Daewoo Nubira Wagon 2002
|
||||
Dodge Caliber Wagon 2012
|
||||
Dodge Caliber Wagon 2007
|
||||
Dodge Caravan Minivan 1997
|
||||
Dodge Ram Pickup 3500 Crew Cab 2010
|
||||
Dodge Ram Pickup 3500 Quad Cab 2009
|
||||
Dodge Sprinter Cargo Van 2009
|
||||
Dodge Journey SUV 2012
|
||||
Dodge Dakota Crew Cab 2010
|
||||
Dodge Dakota Club Cab 2007
|
||||
Dodge Magnum Wagon 2008
|
||||
Dodge Challenger SRT8 2011
|
||||
Dodge Durango SUV 2012
|
||||
Dodge Durango SUV 2007
|
||||
Dodge Charger Sedan 2012
|
||||
Dodge Charger SRT-8 2009
|
||||
Eagle Talon Hatchback 1998
|
||||
FIAT 500 Abarth 2012
|
||||
FIAT 500 Convertible 2012
|
||||
Ferrari FF Coupe 2012
|
||||
Ferrari California Convertible 2012
|
||||
Ferrari 458 Italia Convertible 2012
|
||||
Ferrari 458 Italia Coupe 2012
|
||||
Fisker Karma Sedan 2012
|
||||
Ford F-450 Super Duty Crew Cab 2012
|
||||
Ford Mustang Convertible 2007
|
||||
Ford Freestar Minivan 2007
|
||||
Ford Expedition EL SUV 2009
|
||||
Ford Edge SUV 2012
|
||||
Ford Ranger SuperCab 2011
|
||||
Ford GT Coupe 2006
|
||||
Ford F-150 Regular Cab 2012
|
||||
Ford F-150 Regular Cab 2007
|
||||
Ford Focus Sedan 2007
|
||||
Ford E-Series Wagon Van 2012
|
||||
Ford Fiesta Sedan 2012
|
||||
GMC Terrain SUV 2012
|
||||
GMC Savana Van 2012
|
||||
GMC Yukon Hybrid SUV 2012
|
||||
GMC Acadia SUV 2012
|
||||
GMC Canyon Extended Cab 2012
|
||||
Geo Metro Convertible 1993
|
||||
HUMMER H3T Crew Cab 2010
|
||||
HUMMER H2 SUT Crew Cab 2009
|
||||
Honda Odyssey Minivan 2012
|
||||
Honda Odyssey Minivan 2007
|
||||
Honda Accord Coupe 2012
|
||||
Honda Accord Sedan 2012
|
||||
Hyundai Veloster Hatchback 2012
|
||||
Hyundai Santa Fe SUV 2012
|
||||
Hyundai Tucson SUV 2012
|
||||
Hyundai Veracruz SUV 2012
|
||||
Hyundai Sonata Hybrid Sedan 2012
|
||||
Hyundai Elantra Sedan 2007
|
||||
Hyundai Accent Sedan 2012
|
||||
Hyundai Genesis Sedan 2012
|
||||
Hyundai Sonata Sedan 2012
|
||||
Hyundai Elantra Touring Hatchback 2012
|
||||
Hyundai Azera Sedan 2012
|
||||
Infiniti G Coupe IPL 2012
|
||||
Infiniti QX56 SUV 2011
|
||||
Isuzu Ascender SUV 2008
|
||||
Jaguar XK XKR 2012
|
||||
Jeep Patriot SUV 2012
|
||||
Jeep Wrangler SUV 2012
|
||||
Jeep Liberty SUV 2012
|
||||
Jeep Grand Cherokee SUV 2012
|
||||
Jeep Compass SUV 2012
|
||||
Lamborghini Reventon Coupe 2008
|
||||
Lamborghini Aventador Coupe 2012
|
||||
Lamborghini Gallardo LP 570-4 Superleggera 2012
|
||||
Lamborghini Diablo Coupe 2001
|
||||
Land Rover Range Rover SUV 2012
|
||||
Land Rover LR2 SUV 2012
|
||||
Lincoln Town Car Sedan 2011
|
||||
MINI Cooper Roadster Convertible 2012
|
||||
Maybach Landaulet Convertible 2012
|
||||
Mazda Tribute SUV 2011
|
||||
McLaren MP4-12C Coupe 2012
|
||||
Mercedes-Benz 300-Class Convertible 1993
|
||||
Mercedes-Benz C-Class Sedan 2012
|
||||
Mercedes-Benz SL-Class Coupe 2009
|
||||
Mercedes-Benz E-Class Sedan 2012
|
||||
Mercedes-Benz S-Class Sedan 2012
|
||||
Mercedes-Benz Sprinter Van 2012
|
||||
Mitsubishi Lancer Sedan 2012
|
||||
Nissan Leaf Hatchback 2012
|
||||
Nissan NV Passenger Van 2012
|
||||
Nissan Juke Hatchback 2012
|
||||
Nissan 240SX Coupe 1998
|
||||
Plymouth Neon Coupe 1999
|
||||
Porsche Panamera Sedan 2012
|
||||
Ram C/V Cargo Van Minivan 2012
|
||||
Rolls-Royce Phantom Drophead Coupe Convertible 2012
|
||||
Rolls-Royce Ghost Sedan 2012
|
||||
Rolls-Royce Phantom Sedan 2012
|
||||
Scion xD Hatchback 2012
|
||||
Spyker C8 Convertible 2009
|
||||
Spyker C8 Coupe 2009
|
||||
Suzuki Aerio Sedan 2007
|
||||
Suzuki Kizashi Sedan 2012
|
||||
Suzuki SX4 Hatchback 2012
|
||||
Suzuki SX4 Sedan 2012
|
||||
Tesla Model S Sedan 2012
|
||||
Toyota Sequoia SUV 2012
|
||||
Toyota Camry Sedan 2012
|
||||
Toyota Corolla Sedan 2012
|
||||
Toyota 4Runner SUV 2012
|
||||
Volkswagen Golf Hatchback 2012
|
||||
Volkswagen Golf Hatchback 1991
|
||||
Volkswagen Beetle Hatchback 2012
|
||||
Volvo C30 Hatchback 2012
|
||||
Volvo 240 Sedan 1993
|
||||
Volvo XC90 SUV 2007
|
||||
smart fortwo Convertible 2012
|
60000
cifar100/allimages.txt
Normal file
60000
cifar100/allimages.txt
Normal file
File diff suppressed because it is too large
Load Diff
100
cifar100/labels.txt
Normal file
100
cifar100/labels.txt
Normal file
@ -0,0 +1,100 @@
|
||||
apple
|
||||
aquarium_fish
|
||||
baby
|
||||
bear
|
||||
beaver
|
||||
bed
|
||||
bee
|
||||
beetle
|
||||
bicycle
|
||||
bottle
|
||||
bowl
|
||||
boy
|
||||
bridge
|
||||
bus
|
||||
butterfly
|
||||
camel
|
||||
can
|
||||
castle
|
||||
caterpillar
|
||||
cattle
|
||||
chair
|
||||
chimpanzee
|
||||
clock
|
||||
cloud
|
||||
cockroach
|
||||
couch
|
||||
crab
|
||||
crocodile
|
||||
cup
|
||||
dinosaur
|
||||
dolphin
|
||||
elephant
|
||||
flatfish
|
||||
forest
|
||||
fox
|
||||
girl
|
||||
hamster
|
||||
house
|
||||
kangaroo
|
||||
keyboard
|
||||
lamp
|
||||
lawn_mower
|
||||
leopard
|
||||
lion
|
||||
lizard
|
||||
lobster
|
||||
man
|
||||
maple_tree
|
||||
motorcycle
|
||||
mountain
|
||||
mouse
|
||||
mushroom
|
||||
oak_tree
|
||||
orange
|
||||
orchid
|
||||
otter
|
||||
palm_tree
|
||||
pear
|
||||
pickup_truck
|
||||
pine_tree
|
||||
plain
|
||||
plate
|
||||
poppy
|
||||
porcupine
|
||||
possum
|
||||
rabbit
|
||||
raccoon
|
||||
ray
|
||||
road
|
||||
rocket
|
||||
rose
|
||||
sea
|
||||
seal
|
||||
shark
|
||||
shrew
|
||||
skunk
|
||||
skyscraper
|
||||
snail
|
||||
snake
|
||||
spider
|
||||
squirrel
|
||||
streetcar
|
||||
sunflower
|
||||
sweet_pepper
|
||||
table
|
||||
tank
|
||||
telephone
|
||||
television
|
||||
tiger
|
||||
tractor
|
||||
train
|
||||
trout
|
||||
tulip
|
||||
turtle
|
||||
wardrobe
|
||||
whale
|
||||
willow_tree
|
||||
wolf
|
||||
woman
|
||||
worm
|
60000
cifar100/unprocessed/allimages-us.txt
Normal file
60000
cifar100/unprocessed/allimages-us.txt
Normal file
File diff suppressed because it is too large
Load Diff
33
cifar100/unprocessed/processor.py
Normal file
33
cifar100/unprocessed/processor.py
Normal file
@ -0,0 +1,33 @@
|
||||
"""
|
||||
Turn a combined, unsorted all images list into a sorted list with the right paths
|
||||
"""
|
||||
|
||||
from os import listdir
|
||||
from os.path import isfile, join
|
||||
import random
|
||||
|
||||
ALLFILE="allimages.txt"
|
||||
|
||||
def main():
|
||||
with open("allimages-us.txt",'r') as source:
|
||||
|
||||
data = [ line.split(' ') for line in source ]
|
||||
data = [ (line[0].replace('fine', 'fine_comb'), line[1]) for line in data ]
|
||||
data = [ (line[0].replace('test', 'all_ims'), line[1]) for line in data ]
|
||||
data = [ (line[0].replace('train', 'all_ims'), line[1]) for line in data ]
|
||||
|
||||
data = [ (line[0].split('/'), line[1]) for line in data ]
|
||||
data.sort(key=lambda x: (x[1], x[0][-1]))
|
||||
|
||||
data = [ ('/'.join(line[0]), line[1]) for line in data ]
|
||||
data = [ ' '.join((line[0], line[1])) for line in data ]
|
||||
|
||||
for i in data[:5]:
|
||||
print(i)
|
||||
|
||||
op=open(ALLFILE,'w')
|
||||
for i in data:
|
||||
op.write(i)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
10000
cifar100/unprocessed/test.txt
Normal file
10000
cifar100/unprocessed/test.txt
Normal file
File diff suppressed because it is too large
Load Diff
50000
cifar100/unprocessed/train.txt
Normal file
50000
cifar100/unprocessed/train.txt
Normal file
File diff suppressed because it is too large
Load Diff
50
models/alexnet-template.py
Normal file
50
models/alexnet-template.py
Normal file
@ -0,0 +1,50 @@
|
||||
# Preferred settings for this model are:
|
||||
# Base Learning Rate = 0.001
|
||||
# Crop Size = 224
|
||||
|
||||
from model import Tower
|
||||
from utils import model_property
|
||||
import tensorflow as tf
|
||||
import tensorflow.contrib.slim as slim
|
||||
import utils as digits
|
||||
|
||||
|
||||
class UserModel(Tower):
    """DIGITS model template implementing the AlexNet architecture."""

    @model_property
    def inference(self):
        """Assemble the AlexNet forward graph; returns raw class logits."""
        input_dims = [-1, self.input_shape[0], self.input_shape[1], self.input_shape[2]]
        images = tf.reshape(self.x, shape=input_dims)

        # Shared initializer/regularizer applied to every conv and FC layer.
        with slim.arg_scope([slim.conv2d, slim.fully_connected],
                            weights_initializer=tf.contrib.layers.xavier_initializer(),
                            weights_regularizer=slim.l2_regularizer(1e-6)):

            net = slim.conv2d(images, 96, [11, 11], 4, padding='VALID', scope='conv1')
            net = slim.max_pool2d(net, [3, 3], 2, scope='pool1')

            net = slim.conv2d(net, 256, [5, 5], 1, scope='conv2')
            net = slim.max_pool2d(net, [3, 3], 2, scope='pool2')

            net = slim.conv2d(net, 384, [3, 3], 1, scope='conv3')
            net = slim.conv2d(net, 384, [3, 3], 1, scope='conv4')
            net = slim.conv2d(net, 256, [3, 3], 1, scope='conv5')
            net = slim.max_pool2d(net, [3, 3], 2, scope='pool5')

            net = slim.flatten(net)

            # Classifier head: two dropout-regularized FC layers, then logits.
            net = slim.fully_connected(net, 4096, activation_fn=None, scope='fc1')
            net = slim.dropout(net, 0.5, is_training=self.is_training, scope='do1')

            net = slim.fully_connected(net, 4096, activation_fn=None, scope='fc2')
            net = slim.dropout(net, 0.5, is_training=self.is_training, scope='do2')

            net = slim.fully_connected(net, self.nclasses, activation_fn=None, scope='fc3')

            return net

    @model_property
    def loss(self):
        """Classification loss; also attaches an accuracy summary scalar."""
        logits = self.inference
        loss = digits.classification_loss(logits, self.y)
        accuracy = digits.classification_accuracy(logits, self.y)
        self.summaries.append(tf.summary.scalar(accuracy.op.name, accuracy))
        return loss
|
228
models/googlenet-template.py
Normal file
228
models/googlenet-template.py
Normal file
@ -0,0 +1,228 @@
|
||||
# Preferred settings for this model are:
|
||||
# Training epochs = 80
|
||||
# Crop Size = 224
|
||||
# Learning Rate = 0.001
|
||||
# Under advanced learning rate options:
|
||||
# Step Size = 10.0
|
||||
# Gamma = 0.96
|
||||
|
||||
# The auxiliary branches as specified in the original googlenet V1 model do exist in this implementation of
|
||||
# googlenet but it is not used. To use it, be sure to check self.is_training to ensure that it is only used
|
||||
# during training.
|
||||
|
||||
from model import Tower
|
||||
from utils import model_property
|
||||
import tensorflow as tf
|
||||
import utils as digits
|
||||
|
||||
|
||||
class UserModel(Tower):
    """DIGITS model template implementing GoogLeNet (Inception V1).

    ``all_inception_settings`` maps a module name to its branch channel
    counts; each entry is consumed by ``create_inception_variables`` as
    [[#1x1], [#1x1-reduce, #3x3], [#1x1-reduce, #5x5], [#pool-proj]].
    """

    all_inception_settings = {
        '3a': [[64], [96, 128], [16, 32], [32]],
        '3b': [[128], [128, 192], [32, 96], [64]],
        '4a': [[192], [96, 208], [16, 48], [64]],
        '4b': [[160], [112, 224], [24, 64], [64]],
        '4c': [[128], [128, 256], [24, 64], [64]],
        '4d': [[112], [144, 288], [32, 64], [64]],
        '4e': [[256], [160, 320], [32, 128], [128]],
        '5a': [[256], [160, 320], [32, 128], [128]],
        '5b': [[384], [192, 384], [48, 128], [128]]
    }

    @model_property
    def inference(self):
        """Build the GoogLeNet forward graph and return the class logits."""
        # rescale to proper form, really we expect 224 x 224 x 1 in HWC form
        model = tf.reshape(self.x, shape=[-1, self.input_shape[0], self.input_shape[1], self.input_shape[2]])

        # Stem: 7x7 conv with stride 2, then 3x3 max-pool with stride 2.
        conv_7x7_2s_weight, conv_7x7_2s_bias = self.create_conv_vars([7, 7, self.input_shape[2], 64], 'conv_7x7_2s')
        model = self.conv_layer_with_relu(model, conv_7x7_2s_weight, conv_7x7_2s_bias, 2)

        model = self.max_pool(model, 3, 2)

        # model = tf.nn.local_response_normalization(model)

        # 1x1 conv ('VALID' padding) followed by a 3x3 conv widening to 192 channels.
        conv_1x1_vs_weight, conv_1x1_vs_bias = self.create_conv_vars([1, 1, 64, 64], 'conv_1x1_vs')
        model = self.conv_layer_with_relu(model, conv_1x1_vs_weight, conv_1x1_vs_bias, 1, 'VALID')

        conv_3x3_1s_weight, conv_3x3_1s_bias = self.create_conv_vars([3, 3, 64, 192], 'conv_3x3_1s')
        model = self.conv_layer_with_relu(model, conv_3x3_1s_weight, conv_3x3_1s_bias, 1)

        # model = tf.nn.local_response_normalization(model)

        model = self.max_pool(model, 3, 2)

        # Inception modules 3a-3b; the first argument to InceptionSettings is
        # the number of input channels entering that module.
        inception_settings_3a = InceptionSettings(192, UserModel.all_inception_settings['3a'])
        model = self.inception(model, inception_settings_3a, '3a')

        inception_settings_3b = InceptionSettings(256, UserModel.all_inception_settings['3b'])
        model = self.inception(model, inception_settings_3b, '3b')

        model = self.max_pool(model, 3, 2)

        inception_settings_4a = InceptionSettings(480, UserModel.all_inception_settings['4a'])
        model = self.inception(model, inception_settings_4a, '4a')

        # first auxiliary branch for making training faster
        # aux_branch_1 = self.auxiliary_classifier(model, 512, "aux_1")

        inception_settings_4b = InceptionSettings(512, UserModel.all_inception_settings['4b'])
        model = self.inception(model, inception_settings_4b, '4b')

        inception_settings_4c = InceptionSettings(512, UserModel.all_inception_settings['4c'])
        model = self.inception(model, inception_settings_4c, '4c')

        inception_settings_4d = InceptionSettings(512, UserModel.all_inception_settings['4d'])
        model = self.inception(model, inception_settings_4d, '4d')

        # second auxiliary branch for making training faster
        # aux_branch_2 = self.auxiliary_classifier(model, 528, "aux_2")

        inception_settings_4e = InceptionSettings(528, UserModel.all_inception_settings['4e'])
        model = self.inception(model, inception_settings_4e, '4e')

        model = self.max_pool(model, 3, 2)

        inception_settings_5a = InceptionSettings(832, UserModel.all_inception_settings['5a'])
        model = self.inception(model, inception_settings_5a, '5a')

        inception_settings_5b = InceptionSettings(832, UserModel.all_inception_settings['5b'])
        model = self.inception(model, inception_settings_5b, '5b')

        # Global 7x7 average pool, then the final fully-connected classifier.
        model = self.avg_pool(model, 7, 1, 'VALID')

        fc_weight, fc_bias = self.create_fc_vars([1024, self.nclasses], 'fc')
        model = self.fully_connect(model, fc_weight, fc_bias)

        # if self.is_training:
        #     return [aux_branch_1, aux_branch_2, model]

        return model

    @model_property
    def loss(self):
        """Classification loss; also attaches an accuracy summary scalar."""
        model = self.inference
        loss = digits.classification_loss(model, self.y)
        accuracy = digits.classification_accuracy(model, self.y)
        self.summaries.append(tf.summary.scalar(accuracy.op.name, accuracy))
        return loss

    def inception(self, model, inception_setting, layer_name):
        """Build one inception module: four parallel branches concatenated on channels."""
        weights, biases = self.create_inception_variables(inception_setting, layer_name)

        # Branch 1: plain 1x1 conv.
        conv_1x1 = self.conv_layer_with_relu(model, weights['conv_1x1_1'], biases['conv_1x1_1'], 1)

        # Branch 2: 1x1 reduction then 3x3 conv.
        conv_3x3 = self.conv_layer_with_relu(model, weights['conv_1x1_2'], biases['conv_1x1_2'], 1)
        conv_3x3 = self.conv_layer_with_relu(conv_3x3, weights['conv_3x3'], biases['conv_3x3'], 1)

        # Branch 3: 1x1 reduction then 5x5 conv.
        conv_5x5 = self.conv_layer_with_relu(model, weights['conv_1x1_3'], biases['conv_1x1_3'], 1)
        conv_5x5 = self.conv_layer_with_relu(conv_5x5, weights['conv_5x5'], biases['conv_5x5'], 1)

        # Branch 4: 3x3 max-pool (stride 1) then 1x1 projection.
        conv_pool = self.max_pool(model, 3, 1)
        conv_pool = self.conv_layer_with_relu(conv_pool, weights['conv_pool'], biases['conv_pool'], 1)

        # Concatenate the four branches along the channel axis (NHWC axis 3).
        final_model = tf.concat([conv_1x1, conv_3x3, conv_5x5, conv_pool], 3)

        return final_model

    def create_inception_variables(self, inception_setting, layer_name):
        """Create conv weights/biases for every branch of one inception module."""
        model_dim = inception_setting.model_dim
        conv_1x1_1_w, conv_1x1_1_b = self.create_conv_vars([1, 1, model_dim, inception_setting.conv_1x1_1_layers],
                                                           layer_name + '-conv_1x1_1')
        conv_1x1_2_w, conv_1x1_2_b = self.create_conv_vars([1, 1, model_dim, inception_setting.conv_1x1_2_layers],
                                                           layer_name + '-conv_1x1_2')
        conv_1x1_3_w, conv_1x1_3_b = self.create_conv_vars([1, 1, model_dim, inception_setting.conv_1x1_3_layers],
                                                           layer_name + '-conv_1x1_3')
        conv_3x3_w, conv_3x3_b = self.create_conv_vars([3, 3, inception_setting.conv_1x1_2_layers,
                                                        inception_setting.conv_3x3_layers],
                                                       layer_name + '-conv_3x3')
        conv_5x5_w, conv_5x5_b = self.create_conv_vars([5, 5, inception_setting.conv_1x1_3_layers,
                                                        inception_setting.conv_5x5_layers],
                                                       layer_name + '-conv_5x5')
        conv_pool_w, conv_pool_b = self.create_conv_vars([1, 1, model_dim, inception_setting.conv_pool_layers],
                                                         layer_name + '-conv_pool')

        weights = {
            'conv_1x1_1': conv_1x1_1_w,
            'conv_1x1_2': conv_1x1_2_w,
            'conv_1x1_3': conv_1x1_3_w,
            'conv_3x3': conv_3x3_w,
            'conv_5x5': conv_5x5_w,
            'conv_pool': conv_pool_w
        }

        biases = {
            'conv_1x1_1': conv_1x1_1_b,
            'conv_1x1_2': conv_1x1_2_b,
            'conv_1x1_3': conv_1x1_3_b,
            'conv_3x3': conv_3x3_b,
            'conv_5x5': conv_5x5_b,
            'conv_pool': conv_pool_b
        }

        return weights, biases

    def auxiliary_classifier(self, model, input_size, name):
        """Side classifier head: avg-pool -> 1x1 conv -> FC -> dropout.

        NOTE(review): not called by inference() as written (the calls there
        are commented out); only intended for use while self.is_training.
        """
        aux_classifier = self.avg_pool(model, 5, 3, 'VALID')

        conv_weight, conv_bias = self.create_conv_vars([1, 1, input_size, input_size], name + '-conv_1x1')
        aux_classifier = self.conv_layer_with_relu(aux_classifier, conv_weight, conv_bias, 1)

        # FC input size assumes a 4x4 spatial map after the pooling above
        # -- TODO confirm against the actual input resolution.
        fc_weight, fc_bias = self.create_fc_vars([4*4*input_size, self.nclasses], name + '-fc')
        aux_classifier = self.fully_connect(aux_classifier, fc_weight, fc_bias)

        aux_classifier = tf.nn.dropout(aux_classifier, 0.7)

        return aux_classifier

    def conv_layer_with_relu(self, model, weights, biases, stride_size, padding='SAME'):
        """2-D convolution (square stride) + bias + ReLU."""
        new_model = tf.nn.conv2d(model, weights, strides=[1, stride_size, stride_size, 1], padding=padding)
        new_model = tf.nn.bias_add(new_model, biases)
        new_model = tf.nn.relu(new_model)
        return new_model

    def max_pool(self, model, kernal_size, stride_size, padding='SAME'):
        """Square max-pool.  ('kernal' spelling kept for interface stability.)"""
        new_model = tf.nn.max_pool(model, ksize=[1, kernal_size, kernal_size, 1],
                                   strides=[1, stride_size, stride_size, 1], padding=padding)
        return new_model

    def avg_pool(self, model, kernal_size, stride_size, padding='SAME'):
        """Square average-pool.  ('kernal' spelling kept for interface stability.)"""
        new_model = tf.nn.avg_pool(model, ksize=[1, kernal_size, kernal_size, 1],
                                   strides=[1, stride_size, stride_size, 1], padding=padding)
        return new_model

    def fully_connect(self, model, weights, biases):
        """Flatten to the FC input width, then matmul + bias + ReLU."""
        fc_model = tf.reshape(model, [-1, weights.get_shape().as_list()[0]])
        fc_model = tf.matmul(fc_model, weights)
        fc_model = tf.add(fc_model, biases)
        fc_model = tf.nn.relu(fc_model)
        return fc_model

    def create_conv_vars(self, size, name):
        """Create (weight, bias) for a conv layer; size is [kh, kw, in, out]."""
        weight = self.create_weight(size, name + '_W')
        bias = self.create_bias(size[3], name + '_b')
        return weight, bias

    def create_fc_vars(self, size, name):
        """Create (weight, bias) for an FC layer; size is [in, out]."""
        weight = self.create_weight(size, name + '_W')
        bias = self.create_bias(size[1], name + '_b')
        return weight, bias

    def create_weight(self, size, name):
        """Xavier-initialized weight variable of the given shape."""
        weight = tf.get_variable(name, size, initializer=tf.contrib.layers.xavier_initializer())
        return weight

    def create_bias(self, size, name):
        """Bias variable initialized to the constant 0.2."""
        bias = tf.get_variable(name, [size], initializer=tf.constant_initializer(0.2))
        return bias
|
||||
|
||||
|
||||
class InceptionSettings():
    """Per-branch channel counts for one inception module.

    ``inception_settings`` is [[#1x1], [#1x1-reduce, #3x3],
    [#1x1-reduce, #5x5], [#pool-proj]]; ``model_dim`` is the number of
    channels entering the module.
    """

    def __init__(self, model_dim, inception_settings):
        branch_1x1, branch_3x3, branch_5x5, branch_pool = inception_settings
        self.model_dim = model_dim
        # Branch 1: plain 1x1 convolution.
        self.conv_1x1_1_layers = branch_1x1[0]
        # Branch 2: 1x1 reduction feeding the 3x3 convolution.
        self.conv_1x1_2_layers = branch_3x3[0]
        self.conv_3x3_layers = branch_3x3[1]
        # Branch 3: 1x1 reduction feeding the 5x5 convolution.
        self.conv_1x1_3_layers = branch_5x5[0]
        self.conv_5x5_layers = branch_5x5[1]
        # Branch 4: 1x1 projection after the pooling path.
        self.conv_pool_layers = branch_pool[0]
|
34
models/lenet-template.py
Normal file
34
models/lenet-template.py
Normal file
@ -0,0 +1,34 @@
|
||||
from model import Tower
|
||||
from utils import model_property
|
||||
import tensorflow as tf
|
||||
import tensorflow.contrib.slim as slim
|
||||
import utils as digits
|
||||
|
||||
|
||||
class UserModel(Tower):
    """DIGITS model template implementing the classic LeNet network."""

    @model_property
    def inference(self):
        """Assemble the LeNet forward graph; returns raw class logits."""
        images = tf.reshape(self.x, shape=[-1, self.input_shape[0], self.input_shape[1], self.input_shape[2]])
        # scale (divide by MNIST std)
        images = images * 0.0125
        # Shared initializer/regularizer applied to every conv and FC layer.
        with slim.arg_scope([slim.conv2d, slim.fully_connected],
                            weights_initializer=tf.contrib.layers.xavier_initializer(),
                            weights_regularizer=slim.l2_regularizer(0.0005)):
            net = slim.conv2d(images, 20, [5, 5], padding='VALID', scope='conv1')
            net = slim.max_pool2d(net, [2, 2], padding='VALID', scope='pool1')
            net = slim.conv2d(net, 50, [5, 5], padding='VALID', scope='conv2')
            net = slim.max_pool2d(net, [2, 2], padding='VALID', scope='pool2')
            net = slim.flatten(net)
            net = slim.fully_connected(net, 500, scope='fc1')
            net = slim.dropout(net, 0.5, is_training=self.is_training, scope='do1')
            return slim.fully_connected(net, self.nclasses, activation_fn=None, scope='fc2')

    @model_property
    def loss(self):
        """Classification loss; also attaches an accuracy summary scalar."""
        logits = self.inference
        loss = digits.classification_loss(logits, self.y)
        accuracy = digits.classification_accuracy(logits, self.y)
        self.summaries.append(tf.summary.scalar(accuracy.op.name, accuracy))
        return loss
|
52
split.py
Executable file
52
split.py
Executable file
@ -0,0 +1,52 @@
|
||||
from os import listdir
|
||||
from os.path import isfile, join
|
||||
import random
|
||||
import sys
|
||||
|
||||
ALLFILE="allimages.txt"
|
||||
TRAINFILE="train.txt"
|
||||
VALIDFILE="val.txt"
|
||||
TESTFILE="test.txt"
|
||||
|
||||
# Change this from 1.0 to some lower fraction to subsample the data
|
||||
# e.g. 0.05 will use 5 percent of all the data
|
||||
SUBSAMP=1.0
|
||||
|
||||
def main(path, TRAINPERC, VALIDPERC):
|
||||
|
||||
with open(join(path, ALLFILE),'r') as source:
|
||||
|
||||
data = [ (random.random(), line) for line in source ]
|
||||
data.sort()
|
||||
train=open(join(path, TRAINFILE),'w')
|
||||
valid=open(join(path, VALIDFILE),'w')
|
||||
test=open(join(path, TESTFILE),'w')
|
||||
|
||||
count=len(data)
|
||||
cumlvalid=int(TRAINPERC*count)
|
||||
cumltest=cumlvalid+int(VALIDPERC*count)
|
||||
|
||||
print("Total records = %d" % count)
|
||||
print("Train %d%% = %d" % (round(TRAINPERC*100), cumlvalid) )
|
||||
print("Valid %d%% = %d" % (round(VALIDPERC*100), cumltest-cumlvalid) )
|
||||
print("Test %d%% = %d" % (round((1-TRAINPERC-VALIDPERC)*100), count-cumltest))
|
||||
|
||||
didwrite=0
|
||||
ctr=0
|
||||
for _, line in data:
|
||||
if (ctr>=cumltest):
|
||||
if (random.uniform(0,1)<SUBSAMP):
|
||||
test.write( line )
|
||||
elif (ctr>=cumlvalid):
|
||||
if (random.uniform(0,1)<SUBSAMP):
|
||||
valid.write( line )
|
||||
else:
|
||||
if (random.uniform(0,1)<SUBSAMP):
|
||||
train.write( line )
|
||||
didwrite=didwrite+1
|
||||
ctr=ctr+1
|
||||
|
||||
print('Wrote training data %d' % didwrite)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main(input('Path: '), float(input('Train: ')), float(input('Val: ')))
|
Loading…
Reference in New Issue
Block a user