Skip to content

Commit e1d2331

Browse files
authored
Merge pull request #71 from git-pkgs/fix-composer-dist-urls
Fix composer dist URL rewriting and browse source
2 parents 33d99e3 + e36a924 commit e1d2331

File tree

5 files changed

+442
-37
lines changed

5 files changed

+442
-37
lines changed

internal/handler/composer.go

Lines changed: 31 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import (
55
"fmt"
66
"io"
77
"net/http"
8+
"path"
89
"strings"
910
"time"
1011

@@ -182,9 +183,10 @@ func expandMinifiedVersions(versionList []any) []any {
182183
}
183184

184185
// Merge inherited fields into a new map, then overlay current fields.
186+
// Deep copy values to avoid shared references between versions.
185187
merged := make(map[string]any, len(inherited)+len(vmap))
186188
for k, val := range inherited {
187-
merged[k] = val
189+
merged[k] = deepCopyValue(val)
188190
}
189191
for k, val := range vmap {
190192
merged[k] = val
@@ -199,6 +201,26 @@ func expandMinifiedVersions(versionList []any) []any {
199201
return expanded
200202
}
201203

// deepCopyValue recursively clones JSON-decoded values so that maps and
// slices in the result share no storage with the input. Scalars (strings,
// numbers, bools, nil) are returned unchanged.
func deepCopyValue(v any) any {
	if m, ok := v.(map[string]any); ok {
		out := make(map[string]any, len(m))
		for key, elem := range m {
			out[key] = deepCopyValue(elem)
		}
		return out
	}
	if list, ok := v.([]any); ok {
		out := make([]any, len(list))
		for i := range list {
			out[i] = deepCopyValue(list[i])
		}
		return out
	}
	return v
}
202224
// filterAndRewriteVersions applies cooldown filtering and rewrites dist URLs
203225
// for a single package's version list.
204226
func (h *ComposerHandler) filterAndRewriteVersions(packageName string, versionList []any) []any {
@@ -266,6 +288,14 @@ func (h *ComposerHandler) rewriteDistURL(vmap map[string]any, packageName, versi
266288
filename = url[idx+1:]
267289
}
268290

291+
// GitHub zipball URLs end with a bare commit hash (no extension).
292+
// Append .zip so the archives library can detect the format.
293+
if path.Ext(filename) == "" {
294+
if distType, _ := dist["type"].(string); distType == "zip" {
295+
filename += ".zip"
296+
}
297+
}
298+
269299
parts := strings.SplitN(packageName, "/", vendorPackageParts)
270300
if len(parts) == vendorPackageParts {
271301
newURL := fmt.Sprintf("%s/composer/files/%s/%s/%s/%s",

internal/handler/composer_test.go

Lines changed: 146 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ package handler
33
import (
44
"encoding/json"
55
"log/slog"
6+
"strings"
67
"testing"
78
"time"
89

@@ -245,6 +246,151 @@ func TestComposerRewriteMetadataCooldownPreservesNames(t *testing.T) {
245246
}
246247
}
247248

249+
func TestComposerRewriteDistURLGitHubZipball(t *testing.T) {
250+
// GitHub zipball URLs end with a bare commit hash, no file extension.
251+
// The proxy must produce a filename with .zip extension so that the
252+
// archives library can detect the format when browsing source.
253+
h := &ComposerHandler{
254+
proxy: testProxy(),
255+
proxyURL: "http://localhost:8080",
256+
}
257+
258+
vmap := map[string]any{
259+
"version": "v7.4.8",
260+
"dist": map[string]any{
261+
"url": "https://api.github.com/repos/symfony/asset/zipball/d2e2f014ccd6ec9fae8dbe6336a4164346a2a856",
262+
"type": "zip",
263+
"shasum": "",
264+
"reference": "d2e2f014ccd6ec9fae8dbe6336a4164346a2a856",
265+
},
266+
}
267+
268+
h.rewriteDistURL(vmap, "symfony/asset", "v7.4.8")
269+
270+
dist := vmap["dist"].(map[string]any)
271+
url := dist["url"].(string)
272+
273+
// The rewritten URL's filename must have a .zip extension
274+
if !strings.HasSuffix(url, ".zip") {
275+
t.Errorf("rewritten dist URL filename has no .zip extension: %s", url)
276+
}
277+
}
278+
279+
func TestComposerRewriteMetadataGitHubZipballFilenames(t *testing.T) {
280+
// End-to-end: metadata with GitHub zipball URLs should produce
281+
// download URLs that end in .zip so browse source can open them.
282+
h := &ComposerHandler{
283+
proxy: testProxy(),
284+
proxyURL: "http://localhost:8080",
285+
}
286+
287+
input := `{
288+
"packages": {
289+
"symfony/config": [
290+
{
291+
"version": "v7.4.8",
292+
"dist": {
293+
"url": "https://api.github.com/repos/symfony/config/zipball/c7369cc1da250fcbfe0c5a9d109e419661549c39",
294+
"type": "zip",
295+
"reference": "c7369cc1da250fcbfe0c5a9d109e419661549c39"
296+
}
297+
}
298+
]
299+
}
300+
}`
301+
302+
output, err := h.rewriteMetadata([]byte(input))
303+
if err != nil {
304+
t.Fatalf("rewriteMetadata failed: %v", err)
305+
}
306+
307+
var result map[string]any
308+
if err := json.Unmarshal(output, &result); err != nil {
309+
t.Fatalf("failed to parse output: %v", err)
310+
}
311+
312+
packages := result["packages"].(map[string]any)
313+
versions := packages["symfony/config"].([]any)
314+
v := versions[0].(map[string]any)
315+
dist := v["dist"].(map[string]any)
316+
url := dist["url"].(string)
317+
318+
if !strings.HasSuffix(url, ".zip") {
319+
t.Errorf("rewritten URL should end in .zip, got %s", url)
320+
}
321+
}
322+
323+
func TestComposerExpandMinifiedSharedDistReferences(t *testing.T) {
324+
// When a minified version inherits the dist field from a previous version
325+
// (i.e. it doesn't include its own dist), expanding + rewriting must not
326+
// corrupt the dist URLs via shared map references.
327+
h := &ComposerHandler{
328+
proxy: testProxy(),
329+
proxyURL: "http://localhost:8080",
330+
}
331+
332+
// In this minified payload, v5.3.0 does NOT include a dist field,
333+
// so it inherits v5.4.0's dist. After expansion and URL rewriting,
334+
// each version must have its own correct dist URL.
335+
input := `{
336+
"minified": "composer/2.0",
337+
"packages": {
338+
"vendor/pkg": [
339+
{
340+
"name": "vendor/pkg",
341+
"version": "5.4.0",
342+
"dist": {
343+
"url": "https://api.github.com/repos/vendor/pkg/zipball/aaa111",
344+
"type": "zip",
345+
"reference": "aaa111"
346+
}
347+
},
348+
{
349+
"version": "5.3.0"
350+
}
351+
]
352+
}
353+
}`
354+
355+
output, err := h.rewriteMetadata([]byte(input))
356+
if err != nil {
357+
t.Fatalf("rewriteMetadata failed: %v", err)
358+
}
359+
360+
var result map[string]any
361+
if err := json.Unmarshal(output, &result); err != nil {
362+
t.Fatalf("failed to parse output: %v", err)
363+
}
364+
365+
packages := result["packages"].(map[string]any)
366+
versions := packages["vendor/pkg"].([]any)
367+
if len(versions) != 2 {
368+
t.Fatalf("expected 2 versions, got %d", len(versions))
369+
}
370+
371+
v1 := versions[0].(map[string]any)
372+
v2 := versions[1].(map[string]any)
373+
374+
dist1 := v1["dist"].(map[string]any)
375+
dist2 := v2["dist"].(map[string]any)
376+
377+
url1 := dist1["url"].(string)
378+
url2 := dist2["url"].(string)
379+
380+
// Each version must have its own URL with its own version in the path
381+
if !strings.Contains(url1, "/5.4.0/") {
382+
t.Errorf("v5.4.0 dist URL should contain /5.4.0/, got %s", url1)
383+
}
384+
if !strings.Contains(url2, "/5.3.0/") {
385+
t.Errorf("v5.3.0 dist URL should contain /5.3.0/, got %s", url2)
386+
}
387+
388+
// The two URLs must be different
389+
if url1 == url2 {
390+
t.Errorf("both versions have the same dist URL (shared reference bug): %s", url1)
391+
}
392+
}
393+
248394
func TestComposerRewriteMetadataCooldown(t *testing.T) {
249395
now := time.Now()
250396
old := now.Add(-10 * 24 * time.Hour).Format(time.RFC3339)

internal/server/browse.go

Lines changed: 72 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package server
22

33
import (
4+
"bytes"
45
"encoding/json"
56
"fmt"
67
"io"
@@ -17,17 +18,75 @@ import (
1718

1819
const contentTypePlainText = "text/plain; charset=utf-8"
1920

20-
// getStripPrefix returns the path prefix to strip for a given ecosystem.
21-
// npm packages wrap content in a "package/" directory.
22-
func getStripPrefix(ecosystem string) string {
23-
switch ecosystem {
24-
case "npm":
25-
return "package/"
26-
default:
// archiveFilename normalizes a cached artifact's filename for archive
// format detection. Composer artifacts fetched from GitHub zipball URLs are
// stored under a bare commit hash with no extension; appending .zip lets
// the archives library recognize the format.
func archiveFilename(filename string) string {
	if path.Ext(filename) != "" {
		return filename
	}
	return filename + ".zip"
}
32+
// detectSingleRootDir returns the single top-level directory name if all files
33+
// in the archive live under one common directory (e.g. GitHub zipballs use
34+
// "repo-hash/"). Returns "" if there's no single root or the archive is flat.
35+
func detectSingleRootDir(reader archives.Reader) string {
36+
files, err := reader.List()
37+
if err != nil || len(files) == 0 {
38+
return ""
39+
}
40+
41+
var root string
42+
for _, f := range files {
43+
parts := strings.SplitN(f.Path, "/", 2) //nolint:mnd // split into dir + rest
44+
if len(parts) == 0 {
45+
continue
46+
}
47+
dir := parts[0]
48+
if root == "" {
49+
root = dir
50+
} else if dir != root {
51+
return ""
52+
}
53+
}
54+
55+
if root == "" {
2756
return ""
2857
}
58+
return root + "/"
59+
}
60+
61+
// openArchive opens a cached artifact as an archive reader, auto-detecting
62+
// and stripping a single top-level directory prefix (like GitHub zipballs).
63+
// For npm, the hardcoded "package/" prefix takes precedence.
64+
func openArchive(filename string, content io.Reader, ecosystem string) (archives.Reader, error) { //nolint:ireturn // wraps multiple archive implementations
65+
fname := archiveFilename(filename)
66+
67+
// npm always uses package/ prefix
68+
if ecosystem == "npm" {
69+
return archives.OpenWithPrefix(fname, content, "package/")
70+
}
71+
72+
// Read content into memory so we can scan then wrap with prefix
73+
data, err := io.ReadAll(content)
74+
if err != nil {
75+
return nil, fmt.Errorf("reading artifact: %w", err)
76+
}
77+
78+
// Open once to detect root prefix
79+
probe, err := archives.Open(fname, bytes.NewReader(data))
80+
if err != nil {
81+
return nil, err
82+
}
83+
prefix := detectSingleRootDir(probe)
84+
_ = probe.Close()
85+
86+
return archives.OpenWithPrefix(fname, bytes.NewReader(data), prefix)
2987
}
3088

89+
3190
// BrowseListResponse contains the file listing for a directory in an archives.
3291
type BrowseListResponse struct {
3392
Path string `json:"path"`
@@ -174,9 +233,8 @@ func (s *Server) browseList(w http.ResponseWriter, r *http.Request, ecosystem, n
174233
}
175234
defer func() { _ = artifactReader.Close() }()
176235

177-
// Open archive with appropriate prefix stripping
178-
stripPrefix := getStripPrefix(ecosystem)
179-
archiveReader, err := archives.OpenWithPrefix(cachedArtifact.Filename, artifactReader, stripPrefix)
236+
// Open archive with auto-detected prefix stripping
237+
archiveReader, err := openArchive(cachedArtifact.Filename, artifactReader, ecosystem)
180238
if err != nil {
181239
s.logger.Error("failed to open archive", "error", err, "filename", cachedArtifact.Filename)
182240
http.Error(w, "failed to open archive", http.StatusInternalServerError)
@@ -269,9 +327,8 @@ func (s *Server) browseFile(w http.ResponseWriter, r *http.Request, ecosystem, n
269327
}
270328
defer func() { _ = artifactReader.Close() }()
271329

272-
// Open archive with appropriate prefix stripping
273-
stripPrefix := getStripPrefix(ecosystem)
274-
archiveReader, err := archives.OpenWithPrefix(cachedArtifact.Filename, artifactReader, stripPrefix)
330+
// Open archive with auto-detected prefix stripping
331+
archiveReader, err := openArchive(cachedArtifact.Filename, artifactReader, ecosystem)
275332
if err != nil {
276333
s.logger.Error("failed to open archive", "error", err, "filename", cachedArtifact.Filename)
277334
http.Error(w, "failed to open archive", http.StatusInternalServerError)
@@ -484,17 +541,15 @@ func (s *Server) compareDiff(w http.ResponseWriter, r *http.Request, ecosystem,
484541
}
485542
defer func() { _ = toReader.Close() }()
486543

487-
stripPrefix := getStripPrefix(ecosystem)
488-
489-
fromArchive, err := archives.OpenWithPrefix(fromArtifact.Filename, fromReader, stripPrefix)
544+
fromArchive, err := openArchive(fromArtifact.Filename, fromReader, ecosystem)
490545
if err != nil {
491546
s.logger.Error("failed to open from archive", "error", err)
492547
http.Error(w, "failed to open from archive", http.StatusInternalServerError)
493548
return
494549
}
495550
defer func() { _ = fromArchive.Close() }()
496551

497-
toArchive, err := archives.OpenWithPrefix(toArtifact.Filename, toReader, stripPrefix)
552+
toArchive, err := openArchive(toArtifact.Filename, toReader, ecosystem)
498553
if err != nil {
499554
s.logger.Error("failed to open to archive", "error", err)
500555
http.Error(w, "failed to open to archive", http.StatusInternalServerError)

0 commit comments

Comments (0)