From ab0a760a956d3870537844bde5058a8fe959572e Mon Sep 17 00:00:00 2001
From: Michael Penkov
Date: Sun, 21 Aug 2022 12:12:51 +0900
Subject: [PATCH] update README.rst with new public S3 URLs

---
 README.rst | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/README.rst b/README.rst
index 0b631690..64435232 100644
--- a/README.rst
+++ b/README.rst
@@ -348,15 +348,14 @@ Since going over all (or select) keys in an S3 bucket is a very common operation
 .. code-block:: python
 
     >>> from smart_open import s3
-    >>> # get data corresponding to 2010 and later under "silo-open-data/annual/monthly_rain"
     >>> # we use workers=1 for reproducibility; you should use as many workers as you have cores
     >>> bucket = 'silo-open-data'
-    >>> prefix = 'annual/monthly_rain/'
+    >>> prefix = 'Official/annual/monthly_rain/'
     >>> for key, content in s3.iter_bucket(bucket, prefix=prefix, accept_key=lambda key: '/201' in key, workers=1, key_limit=3):
     ...     print(key, round(len(content) / 2**20))
-    annual/monthly_rain/2010.monthly_rain.nc 13
-    annual/monthly_rain/2011.monthly_rain.nc 13
-    annual/monthly_rain/2012.monthly_rain.nc 13
+    Official/annual/monthly_rain/2010.monthly_rain.nc 13
+    Official/annual/monthly_rain/2011.monthly_rain.nc 13
+    Official/annual/monthly_rain/2012.monthly_rain.nc 13
 
 GCS Credentials
 ---------------
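As a quick sanity check of the new public prefix, here is a minimal sketch of streaming one of the listed files directly with ``smart_open.open``. The bucket and key path come from the patched example above; everything else is an assumption, not part of the patch: it assumes smart_open 5.x or later (where the S3 transport accepts a preconfigured boto3 client via ``transport_params``) and that the public ``silo-open-data`` bucket permits anonymous, unsigned reads.

.. code-block:: python

    import boto3
    from botocore import UNSIGNED
    from botocore.config import Config

    from smart_open import open

    # Unsigned (anonymous) client; assumes the public bucket allows anonymous
    # reads, so no AWS credentials are needed.
    client = boto3.client('s3', config=Config(signature_version=UNSIGNED))

    # One of the keys shown in the updated README example above.
    url = 's3://silo-open-data/Official/annual/monthly_rain/2010.monthly_rain.nc'

    # transport_params={'client': ...} assumes smart_open 5.x or later.
    with open(url, 'rb', transport_params={'client': client}) as fin:
        chunk = fin.read(1024 * 1024)  # stream only the first mebibyte
        print(len(chunk))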