@@ -33,8 +33,16 @@ def lambda_handler(event, context):
3333 s3 .download_file (os .environ ['BUCKET_NAME' ], s3_repo_dir + '/repodata/' + f , repo .repodir + 'repodata/' + f )
3434 repo .read ()
3535 print ('Creating Metadata files' )
36- repo , cache = check_changed_files (repo , s3_repo_dir )
37- #Check if object was removed
36+ if event ['Records' ][0 ]['eventName' ].startswith ('ObjectCreated' ):
37+ print ('helpme' )
38+ repo , cache = check_changed_files (repo , s3_repo_dir , newfile = event ['Records' ][0 ]['s3' ]['object' ]['key' ])
39+ else :
40+ repo , cache = check_changed_files (repo , s3_repo_dir )
41+ #save cache to bucket
42+ s3 = boto3 .resource ('s3' )
43+ f_index_obj = s3 .Object (bucket_name = os .environ ['BUCKET_NAME' ], key = s3_repo_dir + '/repo_cache' )
44+ print ("Writing file: %s" % (str (f_index_obj )))
45+ f_index_obj .put (Body = str (json .dumps (cache )))
3846
3947 repo .save ()
4048
@@ -43,16 +51,12 @@ def lambda_handler(event, context):
4351 sign_md_file (repo , s3_repo_dir )
4452
4553 #save files to bucket
46- s3 = boto3 .resource ('s3' )
4754 for f in files :
4855 with open (repo .repodir + 'repodata/' + f , 'rb' ) as g :
4956 f_index_obj = s3 .Object (bucket_name = os .environ ['BUCKET_NAME' ], key = s3_repo_dir + '/repodata/' + f )
5057 print ("Writing file: %s" % (str (f_index_obj )))
5158 f_index_obj .put (Body = g .read (- 1 ), ACL = get_public ())
52- f_index_obj = s3 .Object (bucket_name = os .environ ['BUCKET_NAME' ], key = s3_repo_dir + '/repo_cache' )
53- print ("Writing file: %s" % (str (f_index_obj )))
54- f_index_obj .put (Body = str (json .dumps (cache )))
55-
59+
5660 #Let us clean up
5761 shutil .rmtree (repo .repodir )
5862 if os .path .exists ('/tmp/gpgdocs' ):
@@ -114,14 +118,35 @@ def get_cache(repo, s3_repo_dir):
114118 cache = {}
115119 return cache
116120
117- def check_changed_files (repo , s3_repo_dir ):
def remove_overwritten_file_from_cache(cache, newfile, s3_repo_dir, repo):
    """
    Remove an overwritten package from the repo cache and the repo metadata.

    Called when an S3 ObjectCreated event targets a key that is already
    present in the cache, i.e. an existing .rpm was overwritten: the stale
    entry must be dropped so the caller's scan re-adds the new file.

    :param cache: dict mapping '/filename.rpm' (object key relative to the
        repo prefix, leading slash included) to the package id in the metadata
    :param newfile: full S3 object key of the overwritten file
    :param s3_repo_dir: S3 key prefix of the repository
    :param repo: repository object exposing remove_package(pkg_id)
    :return: the updated cache dict
    :raises KeyError: if newfile is not present in cache (callers are
        expected to check membership first)
    """
    # '/filename.rpm' — strip the repo prefix to get the cache key
    fname = newfile[len(s3_repo_dir):]
    print('file %s has been overwritten and will be removed from md and repo' % (fname))
    pkg_id = cache[fname]
    del cache[fname]

    # Persist the pruned cache immediately so a subsequent event invocation
    # sees a consistent view even if this one fails before finishing.
    s3 = boto3.resource('s3')
    f_index_obj = s3.Object(bucket_name=os.environ['BUCKET_NAME'], key=s3_repo_dir + '/repo_cache')
    # json.dumps already returns str — the original str() wrapper was redundant
    f_index_obj.put(Body=json.dumps(cache))

    repo.remove_package(pkg_id)
    return cache
137+
138+
139+ def check_changed_files (repo , s3_repo_dir ,newfile = None ):
118140 """
119141 check if there are any new files in bucket or any deleted files
120142 """
121143 print ("Checking for changes : %s" % (s3_repo_dir ))
122144 cache = get_cache (repo , s3_repo_dir )
123145 s3 = boto3 .resource ('s3' )
124146 files = []
147+ #if file was overwriten and is in repocache then remove it from cache, so next for loop will add back the new
148+ if newfile != None and newfile [len (s3_repo_dir ):] in cache :
149+ cache = remove_overwritten_file_from_cache (cache , newfile , s3_repo_dir , repo )
125150 #cycle through all objects ending with .rpm in REPO_DIR and check if they are already in repodata, if not add them
126151 for obj in s3 .Bucket (os .environ ['BUCKET_NAME' ]).objects .filter (Prefix = s3_repo_dir ):
127152 files .append (obj .key )
0 commit comments