113 | 113 |
114 | 114 | else:
115 | 115 | will_download = 0
116 |     | -
    | 116 | +
117 | 117 | # there were uploads. determine should any be downloaded
118 | 118 | LOG.info(str(len(potential_downloads)) + ' new uploads for: ' + line)
119 | 119 | current = 0

131 | 131 | LOG.debug(current + 'skipped (previously downloaded)')
132 | 132 |
133 | 133 | # skip if already in download directory
134 |     | -elif os.path.isfile(filename):
    | 134 | +elif os.path.isfile(CONFIG['download_directory'] + filename):
135 | 135 | links_on_disk += 1
136 | 136 | LOG.debug(current + 'skipped (already in downloads directory')
137 | 137 |
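The check added at line 134 builds the path by string concatenation, which only resolves correctly if CONFIG['download_directory'] is stored with a trailing path separator. A minimal alternative sketch (not the project's code; the variable target is introduced here for illustration) uses os.path.join so the separator is supplied automatically:

import os

# Sketch only: CONFIG['download_directory'], filename and links_on_disk are the
# names used in the diff above; os.path.join inserts the separator itself, so
# the configured directory does not need a trailing slash.
target = os.path.join(CONFIG['download_directory'], filename)
if os.path.isfile(target):
    links_on_disk += 1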
151 | 151 | '\t(cached: ' + str(links_in_cache) + \
152 | 152 | ', existing: ' + str(links_on_disk) + ')\n')
153 | 153 |
154 |     | -print ''
155 |     | -LOG.info('starting download of ' + str(len(URL_AND_NAME_LIST)) + ' files')
156 | 154 | if URL_AND_NAME_LIST:
157 |     | -    multi_download(URL_AND_NAME_LIST, CONFIG['parallel_downloads'])
    | 155 | +    print ''
    | 156 | +    LOG.info('starting download of ' + str(len(URL_AND_NAME_LIST)) + ' files')
    | 157 | +    multi_download(URL_AND_NAME_LIST, CONFIG['parallel_downloads'])
    | 158 | +else:
    | 159 | +    LOG.info('nothing to download')
    | 160 | +
158 | 161 |
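multi_download itself is not shown in this diff, so the following is only an illustrative sketch of the call shape used above, assuming URL_AND_NAME_LIST holds (url, destination_path) pairs and that CONFIG['parallel_downloads'] is a worker count; the project's real implementation may differ:

# Illustrative sketch only -- not the project's multi_download.
import urllib
from multiprocessing.dummy import Pool  # thread-backed Pool from the Python 2 stdlib

def multi_download(url_and_name_list, parallel_downloads):
    def fetch(pair):
        url, path = pair
        urllib.urlretrieve(url, path)  # Python 2; urllib.request.urlretrieve in Python 3
    pool = Pool(parallel_downloads)
    pool.map(fetch, url_and_name_list)
    pool.close()
    pool.join()

It would be invoked exactly as in the hunk above: multi_download(URL_AND_NAME_LIST, CONFIG['parallel_downloads']).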
159 | 162 | ##############################################################################
160 | 163 | # WRAP-UP

163 | 166 | # - set last run to yesterday (see FAQ for why it isn't today)
164 | 167 | ##############################################################################
165 | 168 | pickle.dump(CACHE, open('.cache', 'wb'), pickle.HIGHEST_PROTOCOL)
166 |     | -LOG.info('successfully downloaded ' + str(TOTAL_DOWNLOADS) + ' files')
    | 169 | +if URL_AND_NAME_LIST:
    | 170 | +    LOG.info('successfully downloaded ' + str(TOTAL_DOWNLOADS) + ' files')
167 | 171 | YESTERDAY = datetime.date.fromordinal(datetime.date.today().toordinal()-1)
168 | 172 | CONFIG['last_run'] = YESTERDAY.strftime(default.DATETIME_FMT)
169 | 173 |
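Two small notes on the wrap-up block, sketched under the assumption that the cache is read back elsewhere in the script (that loading code is outside this diff):

import datetime
import pickle

# Same result as the fromordinal() form on line 171: yesterday's date.
yesterday = datetime.date.today() - datetime.timedelta(days=1)

# The .cache file written with pickle.dump above would be restored on the next
# run with the matching load call (assumed location; not part of this diff).
cache = pickle.load(open('.cache', 'rb'))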