@@ -87,7 +87,8 @@ def create_develop_snapshot_tag(project):
     tags = commit.refs("tag")
     snapshot_tag = None
     for t in tags:
-        if re.match(t.name, "develop-.*"):
+        print(t)
+        if re.match(t.get("name", ""), "develop-.*"):
             snapshot_tag = t
             break
 
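Worth noting in this hunk: `re.match(pattern, string)` takes the regex pattern first, so both the old and the new call pass the tag name where the pattern belongs. A minimal sketch of the matching as presumably intended, reusing the names from the hunk above:

    import re

    def find_develop_snapshot_tag(tags):
        # re.match(pattern, string): the regex comes first, the candidate second.
        for t in tags:
            if re.match("develop-.*", t.get("name", "")):
                return t
        return None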
@@ -132,12 +133,12 @@ def create_snapshot(bucket: str, tag: github.GitTag, workdir: str, parallel: int
     branch = tag_source_branch(t.name)
     if not branch:
         LOGGER.warning(f"Skipping snapshot for {tag.name}, cannot determine base branch")
-        return
+        return False
 
     client = s3_create_client()
     if s3_object_exists(bucket, "{tag.name}/v3/layout.json"):
         LOGGER.info(f"Skipping snapshot for {tag.name} as it already exists")
-        return
+        return True
 
     gl_project = DEFAULT_GITLAB_PROJECT
     if isinstance(gl_project, str):
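The return values now distinguish "skipped because the snapshot already exists" (True, safe to publish) from "skipped because something is missing" (False). Note also that the existence check passes a literal `"{tag.name}/v3/layout.json"` rather than an f-string, so the tag name is never interpolated. A sketch of the check as presumably intended, assuming `s3_object_exists` is a boto3-backed helper:

    import boto3
    from botocore.exceptions import ClientError

    def s3_object_exists(bucket: str, key: str) -> bool:
        # head_object raises ClientError (404) when the key is absent.
        client = boto3.client("s3")
        try:
            client.head_object(Bucket=bucket, Key=key)
            return True
        except ClientError:
            return False

    # Presumably intended: an f-string so the tag name is interpolated.
    # if s3_object_exists(bucket, f"{tag.name}/v3/layout.json"):
    #     LOGGER.info(f"Skipping snapshot for {tag.name} as it already exists")
    #     return True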
@@ -146,12 +147,12 @@ def create_snapshot(bucket: str, tag: github.GitTag, workdir: str, parallel: int
     pipeline = gl_project.pipelines.list(get_all=False, per_page=1, sha=tag.commit.sha, ref=branch, status="success")
     if not pipeline:
         LOGGER.warning(f"Skipping {tag.name}: Could not find corresponding successful pipeline for {branch}")
-        return
+        return False
 
     LOGGER.info(f"Creating snapshot for: {t.name} from {branch}:{pipeline[0].id}")
 
     # Assuming all snapshots are v3 only now
-    all_specs_catalog, _ = generate_spec_catalogs_v3(bucket, branch, workdir=workdir, include=["tutorial"])
+    all_specs_catalog, _ = generate_spec_catalogs_v3(bucket, branch, workdir=workdir)
 
     gnu_pg_home = os.path.join(workdir, ".gnupg")
     download_and_import_key(gnu_pg_home, workdir, False)
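For context, the pipeline lookup relies on python-gitlab's list filters: `get_all=False` with `per_page=1` returns at most the single most recent match. A minimal sketch of resolving a project path and querying for a successful pipeline (the URL, token, project path, and sha are placeholders):

    import gitlab

    # DEFAULT_GITLAB_PROJECT in the diff may be a path string ("group/project")
    # that still needs resolving into a Project object, as the isinstance check suggests.
    gl = gitlab.Gitlab("https://gitlab.example.com", private_token="<token>")
    gl_project = gl.projects.get("group/project")

    pipeline = gl_project.pipelines.list(
        get_all=False, per_page=1, sha="<commit-sha>", ref="develop", status="success"
    )
    if pipeline:
        print(pipeline[0].id)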
@@ -162,8 +163,6 @@ def create_snapshot(bucket: str, tag: github.GitTag, workdir: str, parallel: int
             continue
 
         stack = j.name.replace("-generate", "")
-        if stack not in ("tutorial"):
-            continue
 
         # Get the lockfile/concrete hashes to sync to snapshot mirror
         job = gl_project.jobs.get(j.id, lazy=True)
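With the tutorial-only guard removed, every `*-generate` job in the pipeline now contributes a stack to the snapshot. A sketch of the surrounding loop pattern, assuming `pipeline_jobs` comes from something like `pipeline[0].jobs.list(get_all=True)`:

    for j in pipeline_jobs:
        if not j.name.endswith("-generate"):
            continue

        stack = j.name.replace("-generate", "")

        # lazy=True returns a shell object without an extra API round trip;
        # attributes are fetched on demand (e.g. when pulling job artifacts).
        job = gl_project.jobs.get(j.id, lazy=True)
        print(f"syncing stack {stack} from job {j.id}")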
@@ -262,8 +261,8 @@ def dryrun_publish(spec, bucket, source, dest, force, gpg_home, workdir):
             continue
 
         tempdir = WORKDIR or tempfile.mkdtemp()
-        create_snapshot(args.bucket, t, tempdir)
-        # Now use publish to create the top level mirror
-        # Don't re-verify everything, it was already done by create_snapshot
-        publish(args.bucket, t.name, verify=False, workdir=tempdir)
+        if create_snapshot(args.bucket, t, tempdir):
+            # Now use publish to create the top level mirror if the snapshot exists
+            # Don't re-verify everything, it was already done by create_snapshot
+            publish(args.bucket, t.name, verify=False, workdir=tempdir)
 
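The caller-side effect of the new return value: publish only runs when the snapshot is known to be complete, whether it was just built or was found already in place. A condensed sketch of the flow, with `tags_to_publish` a hypothetical name for the tag list being iterated:

    import tempfile

    for t in tags_to_publish:
        tempdir = WORKDIR or tempfile.mkdtemp()
        if create_snapshot(args.bucket, t, tempdir):
            # A False return (no base branch, no pipeline) now skips publish
            # instead of publishing a half-built or missing snapshot.
            publish(args.bucket, t.name, verify=False, workdir=tempdir)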