Mirror of https://github.com/denoland/std.git, synced 2024-11-21 12:40:03 +00:00

BREAKING(archive): remove std/archive package (#6185)

This commit is contained in:
parent 90e4aefe7c
commit ece0256dcf
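The deprecation notices in the removed sources point to @std/tar as the replacement. Below is a minimal migration sketch of the archiving example from archive/mod.ts, assuming @std/tar's streaming TarStream API as published on JSR; the import, type, and field names are assumptions based on that package's docs, not part of this commit.

```ts
// Sketch: what the removed Tar example roughly becomes with @std/tar (assumed API).
import { TarStream, type TarStreamInput } from "@std/tar";

const content = new TextEncoder().encode("Some arbitrary content");

const inputs: TarStreamInput[] = [
  {
    type: "file",
    path: "deno.txt",
    size: content.byteLength,
    readable: ReadableStream.from([content]),
  },
];

// Pipe the entries through TarStream and write the archive to disk.
await ReadableStream.from(inputs)
  .pipeThrough(new TarStream())
  .pipeTo((await Deno.create("./out.tar")).writable);
```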
482  .github/dependency_graph.svg  (vendored)
@@ -4,457 +4,463 @@
[Graphviz-generated SVG (title: std_deps): the regenerated dependency graph drops the archive node and its archive->io edge, renumbers and repositions the remaining nodes and edges, and narrows the canvas from 2437pt to 2379pt; the coordinate-only churn is omitted here.]
Before: 21 KiB | After: 22 KiB
3  .github/labeler.yml  (vendored)
@@ -1,6 +1,3 @@
-archive:
-  - changed-files:
-      - any-glob-to-any-file: archive/**
 assert:
   - changed-files:
       - any-glob-to-any-file: assert/**
1  .github/workflows/title.yml  (vendored)
@@ -35,7 +35,6 @@ jobs:
     revert
   # This should be kept up-to-date with the current packages list
   scopes: |
-    archive(/unstable)?
     assert(/unstable)?
     async(/unstable)?
     bytes(/unstable)?
@@ -18,7 +18,6 @@ documentation:

 | Package | Latest version |
 | ------------------------------------------------------ | ----------------------------------------------------------------------------------------- |
-| [archive](https://jsr.io/@std/archive) | [![JSR](https://jsr.io/badges/@std/archive)](https://jsr.io/@std/archive) |
 | [assert](https://jsr.io/@std/assert) | [![JSR](https://jsr.io/badges/@std/assert)](https://jsr.io/@std/assert) |
 | [async](https://jsr.io/@std/async) | [![JSR](https://jsr.io/badges/@std/async)](https://jsr.io/@std/async) |
 | [bytes](https://jsr.io/@std/bytes) | [![JSR](https://jsr.io/badges/@std/bytes)](https://jsr.io/@std/bytes) |
@@ -35,7 +35,6 @@ type Dep = {
   state: DepState;
 };
 type Mod =
-  | "archive"
   | "assert"
   | "async"
   | "bytes"
@@ -79,7 +78,6 @@ type Mod =
   | "yaml";

 const ENTRYPOINTS: Record<Mod, string[]> = {
-  archive: ["mod.ts"],
   assert: ["mod.ts"],
   async: ["mod.ts"],
   bytes: ["mod.ts"],
@@ -30,7 +30,6 @@ type DocNodeWithJsDoc<T = DocNodeBase> = T & {
 };

 const ENTRY_POINTS = [
-  "../archive/mod.ts",
   "../assert/mod.ts",
   "../assert/unstable_never.ts",
   "../async/mod.ts",
@@ -1,206 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

import type { Reader } from "@std/io/types";

/**
 * Base interface for {@linkcode TarMeta}.
 *
 * @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
 * `@std/archive` will be removed in the future.
 *
 * @experimental **UNSTABLE**: New API, yet to be vetted.
 */
export interface TarInfo {
  /**
   * The underlying raw `st_mode` bits that contain the standard Unix
   * permissions for this file/directory.
   */
  fileMode?: number;
  /**
   * Data modification time of the file at the time it was archived. It
   * represents the integer number of seconds since January 1, 1970, 00:00 UTC.
   */
  mtime?: number;
  /**
   * Numeric user ID of the file owner. This is ignored if the operating system
   * does not support numeric user IDs.
   */
  uid?: number;
  /**
   * Numeric group ID of the file owner. This is ignored if the operating
   * system does not support numeric group IDs.
   */
  gid?: number;
  /** The name of the file owner. */
  owner?: string;
  /** The group that the file owner belongs to. */
  group?: string;
  /**
   * The type of file archived.
   *
   * @see {@linkcode FileTypes}
   */
  type?: string;
}

/**
 * Base interface for {@linkcode TarMetaWithLinkName}.
 *
 * @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
 * `@std/archive` will be removed in the future.
 *
 * @experimental **UNSTABLE**: New API, yet to be vetted.
 */
export interface TarMeta extends TarInfo {
  /**
   * The name of the file, with directory names (if any) preceding the file
   * name, separated by slashes.
   */
  fileName: string;
  /**
   * The size of the file in bytes; for archive members that are symbolic or
   * hard links to another file, this field is specified as zero.
   */
  fileSize?: number;
}

/** The type of file archived. */
export enum FileTypes {
  "file" = 0,
  "link" = 1,
  "symlink" = 2,
  "character-device" = 3,
  "block-device" = 4,
  "directory" = 5,
  "fifo" = 6,
  "contiguous-file" = 7,
}

export const HEADER_LENGTH = 512;

/*
struct posix_header {           // byte offset
  char name[100];               //   0
  char mode[8];                 // 100
  char uid[8];                  // 108
  char gid[8];                  // 116
  char size[12];                // 124
  char mtime[12];               // 136
  char chksum[8];               // 148
  char typeflag;                // 156
  char linkname[100];           // 157
  char magic[6];                // 257
  char version[2];              // 263
  char uname[32];               // 265
  char gname[32];               // 297
  char devmajor[8];             // 329
  char devminor[8];             // 337
  char prefix[155];             // 345
                                // 500
};
*/

export const USTAR_STRUCTURE = [
  {
    field: "fileName",
    length: 100,
  },
  {
    field: "fileMode",
    length: 8,
  },
  {
    field: "uid",
    length: 8,
  },
  {
    field: "gid",
    length: 8,
  },
  {
    field: "fileSize",
    length: 12,
  },
  {
    field: "mtime",
    length: 12,
  },
  {
    field: "checksum",
    length: 8,
  },
  {
    field: "type",
    length: 1,
  },
  {
    field: "linkName",
    length: 100,
  },
  {
    field: "ustar",
    length: 8,
  },
  {
    field: "owner",
    length: 32,
  },
  {
    field: "group",
    length: 32,
  },
  {
    field: "majorNumber",
    length: 8,
  },
  {
    field: "minorNumber",
    length: 8,
  },
  {
    field: "fileNamePrefix",
    length: 155,
  },
  {
    field: "padding",
    length: 12,
  },
] as const;

/**
 * @internal
 */
export type UstarFields = (typeof USTAR_STRUCTURE)[number]["field"];

/**
 * Thrown when a read from a stream fails to read the
 * requested number of bytes.
 */
class PartialReadError extends Error {
  partial: Uint8Array;

  constructor(partial: Uint8Array) {
    super("Encountered UnexpectedEof, data only partially read");
    this.name = this.constructor.name;
    this.partial = partial;
  }
}

export async function readBlock(
  reader: Reader,
  p: Uint8Array,
): Promise<number | null> {
  let bytesRead = 0;
  while (bytesRead < p.length) {
    const rr = await reader.read(p.subarray(bytesRead));
    if (rr === null) {
      if (bytesRead === 0) {
        return null;
      } else {
        throw new PartialReadError(p.subarray(0, bytesRead));
      }
    }
    bytesRead += rr;
  }
  return bytesRead;
}
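USTAR_STRUCTURE and HEADER_LENGTH above define the fixed 512-byte header layout; a read-side sketch shows how one such block maps back to named fields. The NUL-stripping and the octal interpretation of numeric fields are assumptions drawn from the posix_header comment, not code from this commit.

```ts
// Sketch: decoding a 512-byte ustar header with the definitions removed above.
import { USTAR_STRUCTURE, type UstarFields } from "./_common.ts"; // as it existed before this commit

function parseUstarHeader(block: Uint8Array): Record<UstarFields, string> {
  const decoder = new TextDecoder();
  const header = {} as Record<UstarFields, string>;
  let offset = 0;
  for (const { field, length } of USTAR_STRUCTURE) {
    // Fields are fixed-width, NUL-padded ASCII; strip the padding.
    header[field] = decoder.decode(block.subarray(offset, offset + length))
      .replaceAll("\0", "")
      .trim();
    offset += length;
  }
  return header;
}

// Numeric fields such as fileSize are stored as octal strings, so
// Number.parseInt(parseUstarHeader(block).fileSize, 8) recovers the byte count.
```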
@@ -1,24 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
// This module is browser compatible.

import type { Reader } from "@std/io/types";

export class MultiReader implements Reader {
  readonly #readers: Reader[];
  #currentIndex = 0;

  constructor(readers: Reader[]) {
    this.#readers = [...readers];
  }

  async read(p: Uint8Array): Promise<number | null> {
    const r = this.#readers[this.#currentIndex];
    if (!r) return null;
    const result = await r.read(p);
    if (result === null) {
      this.#currentIndex++;
      return 0;
    }
    return result;
  }
}
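A short usage sketch of the removed MultiReader, showing the reader-concatenation behavior of read() above. Buffer is taken from the module's own examples; the readAll import path is an assumption about @std/io's exports.

```ts
// Sketch: concatenating two in-memory readers with the removed MultiReader.
import { Buffer } from "@std/io/buffer";
import { readAll } from "@std/io/read-all"; // assumed specifier

const joined = new MultiReader([
  new Buffer(new TextEncoder().encode("foo")),
  new Buffer(new TextEncoder().encode("bar")),
]);

// MultiReader returns 0 (not null) when it switches readers, so readAll keeps going.
console.log(new TextDecoder().decode(await readAll(joined))); // "foobar"
```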
@@ -1,6 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
import { dirname, fromFileUrl, resolve } from "@std/path";

const moduleDir = dirname(fromFileUrl(import.meta.url));
export const testdataDir = resolve(moduleDir, "testdata");
export const filePath = resolve(testdataDir, "example.txt");
@@ -1,9 +0,0 @@
{
  "name": "@std/archive",
  "version": "0.225.4",
  "exports": {
    ".": "./mod.ts",
    "./tar": "./tar.ts",
    "./untar": "./untar.ts"
  }
}
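The exports map above is what the package's import specifiers resolved through; a sketch of the mappings it served before this commit follows. The Untar export name is an assumption, since untar.ts itself is not shown in this diff.

```ts
// Specifier -> file, per the "exports" map in the removed archive/deno.json:
import * as archive from "@std/archive"; // "."       -> ./mod.ts
import { Tar } from "@std/archive/tar"; // "./tar"   -> ./tar.ts
import { Untar } from "@std/archive/untar"; // "./untar" -> ./untar.ts (export name assumed)
```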
@@ -1,73 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.

/*!
 * Ported and modified from: https://github.com/beatgammit/tar-js and
 * licensed as:
 *
 * (The MIT License)
 *
 * Copyright (c) 2011 T. Jameson Little
 * Copyright (c) 2019 Jun Kato
 * Copyright (c) 2018-2024 the Deno authors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/**
 * Tar is a utility for collecting multiple files (or any arbitrary data) into one
 * archive file, while untar is the inverse utility to extract the files from an
 * archive. Files are not compressed, only collected into the archive.
 *
 * ```ts ignore
 * import { Tar } from "@std/archive/tar";
 * import { Buffer } from "@std/io/buffer";
 * import { copy } from "@std/io/copy";
 *
 * const tar = new Tar();
 *
 * // Now that we've created our tar, let's add some files to it:
 *
 * const content = new TextEncoder().encode("Some arbitrary content");
 * await tar.append("deno.txt", {
 *   reader: new Buffer(content),
 *   contentSize: content.byteLength,
 * });
 *
 * // This file is sourced from the filesystem (and renamed in the archive)
 * await tar.append("filename_in_archive.txt", {
 *   filePath: "./filename_on_filesystem.txt",
 * });
 *
 * // Now let's write the tar (with its two files) to the filesystem
 * // use tar.getReader() to read the contents.
 *
 * const writer = await Deno.open("./out.tar", { write: true, create: true });
 * await copy(tar.getReader(), writer);
 * writer.close();
 * ```
 *
 * @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
 * `@std/archive` will be removed in the future.
 *
 * @experimental **UNSTABLE**: New API, yet to be vetted.
 *
 * @module
 */
export * from "./tar.ts";
export * from "./untar.ts";
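The module doc above covers the old Tar write path; for the read path its deprecation notice points to @std/tar, whose streaming UntarStream is sketched below. The API shape (class name, entry fields) is assumed from the @std/tar docs and is not guaranteed by this commit.

```ts
// Sketch: extracting entries with @std/tar's UntarStream (assumed API), replacing
// the removed Untar reader from @std/archive/untar.
import { UntarStream } from "@std/tar";

const entries = (await Deno.open("./out.tar")).readable
  .pipeThrough(new UntarStream());

for await (const entry of entries) {
  console.log(entry.path);
  // Each entry's body must be consumed (or cancelled) before moving to the next.
  await entry.readable?.cancel();
}
```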
497  archive/tar.ts
@@ -1,497 +0,0 @@
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
/*!
 * Ported and modified from: https://github.com/beatgammit/tar-js and
 * licensed as:
 *
 * (The MIT License)
 *
 * Copyright (c) 2011 T. Jameson Little
 * Copyright (c) 2019 Jun Kato
 * Copyright (c) 2018-2024 the Deno authors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

import {
  FileTypes,
  type TarInfo,
  type TarMeta,
  USTAR_STRUCTURE,
} from "./_common.ts";
import type { Reader } from "@std/io/types";
import { MultiReader } from "./_multi_reader.ts";
import { Buffer } from "@std/io/buffer";
import { HEADER_LENGTH } from "./_common.ts";

export type { TarInfo, TarMeta };

/**
 * Options for {@linkcode Tar.append}.
 *
 * @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
 * `@std/archive` will be removed in the future.
 *
 * @experimental **UNSTABLE**: New API, yet to be vetted.
 */
export interface TarOptions extends TarInfo {
  /**
   * Filepath of the file to append to the archive
   */
  filePath?: string;

  /**
   * A Reader of any arbitrary content to append to the archive
   */
  reader?: Reader;

  /**
   * Size of the content to be appended. This is only required
   * when passing a reader to the archive.
   */
  contentSize?: number;
}

const USTAR_MAGIC_HEADER = "ustar\u000000" as const;

/**
 * Simple file reader
 */
class FileReader implements Reader {
  #file: Deno.FsFile | undefined;
  #filePath: string;

  constructor(filePath: string) {
    this.#filePath = filePath;
  }

  async read(p: Uint8Array): Promise<number | null> {
    if (!this.#file) {
      this.#file = await Deno.open(this.#filePath, { read: true });
    }
    const res = await this.#file.read(p);
    if (res === null) {
      this.#file.close();
      this.#file = undefined;
    }
    return res;
  }
}

/**
 * Pads a number with leading zeros to a specified number of bytes.
 *
 * @param num The number to pad.
 * @param bytes The number of bytes to pad the number to.
 * @returns The padded number as a string.
 */
function pad(num: number, bytes: number): string {
  return num.toString(8).padStart(bytes, "0");
}
|
||||
|
||||
/**
|
||||
* Formats the header data for a tar file entry.
|
||||
*
|
||||
* @param data The data object containing the values for the tar header fields.
|
||||
* @returns The formatted header data as a Uint8Array.
|
||||
*/
|
||||
function formatHeader(data: TarData): Uint8Array {
|
||||
const encoder = new TextEncoder();
|
||||
const buffer = new Uint8Array(HEADER_LENGTH);
|
||||
let offset = 0;
|
||||
for (const { field, length } of USTAR_STRUCTURE) {
|
||||
const entry = encoder.encode(data[field as keyof TarData] ?? "");
|
||||
buffer.set(entry, offset);
|
||||
offset += length;
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Base interface for {@linkcode TarDataWithSource}.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*/
|
||||
export interface TarData {
|
||||
/** Name of the file, excluding directory names (if any). */
|
||||
fileName?: string;
|
||||
/** Directory names preceding the file name (if any). */
|
||||
fileNamePrefix?: string;
|
||||
/**
|
||||
* The underlying raw `st_mode` bits that contain the standard Unix
|
||||
* permissions for this file/directory.
|
||||
*/
|
||||
fileMode?: string;
|
||||
/**
|
||||
* Numeric user ID of the file owner. This is ignored if the operating system
|
||||
* does not support numeric user IDs.
|
||||
*/
|
||||
uid?: string;
|
||||
/**
|
||||
* Numeric group ID of the file owner. This is ignored if the operating
|
||||
* system does not support numeric group IDs.
|
||||
*/
|
||||
gid?: string;
|
||||
/**
|
||||
* The size of the file in bytes; for archive members that are symbolic or
|
||||
* hard links to another file, this field is specified as zero.
|
||||
*/
|
||||
fileSize?: string;
|
||||
/**
|
||||
* Data modification time of the file at the time it was archived. It
|
||||
* represents the integer number of seconds since January 1, 1970, 00:00 UTC.
|
||||
*/
|
||||
mtime?: string;
|
||||
/** The simple sum of all bytes in the header block */
|
||||
checksum?: string;
|
||||
/**
|
||||
* The type of file archived.
|
||||
*
|
||||
* @see {@linkcode FileTypes}
|
||||
*/
|
||||
type?: string;
|
||||
/** Ustar magic header */
|
||||
ustar?: string;
|
||||
/** The name of the file owner. */
|
||||
owner?: string;
|
||||
/** The group that the file owner belongs to. */
|
||||
group?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tar data interface for {@linkcode Tar.data}.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*/
|
||||
export interface TarDataWithSource extends TarData {
|
||||
/**
|
||||
* Path of the file to read.
|
||||
*/
|
||||
filePath?: string;
|
||||
/**
|
||||
* Buffer reader.
|
||||
*/
|
||||
reader?: Reader;
|
||||
}
|
||||
|
||||
/**
|
||||
* ### Overview
|
||||
* A class to create a tar archive. Tar archives allow for storing multiple files in a
|
||||
* single file (called an archive, or sometimes a tarball). These archives typically
|
||||
* have the '.tar' extension.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* ### Usage
|
||||
* The workflow is to create a Tar instance, append files to it, and then write the
|
||||
* tar archive to the filesystem (or other output stream). See the worked example
|
||||
* below for details.
|
||||
*
|
||||
* ### Compression
|
||||
* Tar archives are not compressed by default. If you want to compress the archive,
|
||||
* you may compress the tar archive after creation, but this capability is not provided
|
||||
* here.
|
||||
*
|
||||
* ### File format and limitations
|
||||
*
|
||||
* The ustar file format is used for creating the archive file.
|
||||
* While this format is compatible with most tar readers,
|
||||
* the format has several limitations, including:
|
||||
* * Files must be smaller than 8GiB
|
||||
* * Filenames (including path) must be shorter than 256 characters
|
||||
* * Filenames (including path) cannot contain non-ASCII characters
|
||||
* * Sparse files are not supported
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Tar } from "@std/archive/tar";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
* import { copy } from "@std/io/copy";
|
||||
*
|
||||
* const tar = new Tar();
|
||||
*
|
||||
* // Now that we've created our tar, let's add some files to it:
|
||||
*
|
||||
* const content = new TextEncoder().encode("Some arbitrary content");
|
||||
* await tar.append("deno.txt", {
|
||||
* reader: new Buffer(content),
|
||||
* contentSize: content.byteLength,
|
||||
* });
|
||||
*
|
||||
* // This file is sourced from the filesystem (and renamed in the archive)
|
||||
* await tar.append("filename_in_archive.txt", {
|
||||
* filePath: "./filename_on_filesystem.txt",
|
||||
* });
|
||||
*
|
||||
* // Now let's write the tar (with its two files) to the filesystem
|
||||
* // use tar.getReader() to read the contents.
|
||||
*
|
||||
* const writer = await Deno.open("./out.tar", { write: true, create: true });
|
||||
* await copy(tar.getReader(), writer);
|
||||
* writer.close();
|
||||
* ```
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*/
|
||||
export class Tar {
|
||||
/** Tar data. */
|
||||
#data: TarDataWithSource[];
|
||||
|
||||
/** Constructs a new instance. */
|
||||
constructor() {
|
||||
this.#data = [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Append a file or reader of arbitrary content to this tar archive. Directories
|
||||
* appended to the archive append only the directory itself to the archive, not
|
||||
* its contents. To add a directory and its contents, recursively append the
|
||||
* directory's contents. Directories and subdirectories will be created automatically
|
||||
* in the archive as required.
|
||||
*
|
||||
* @param filenameInArchive File name of the content in the archive. E.g.
|
||||
* `test.txt`. Use slash for directory separators.
|
||||
* @param source Details of the source of the content including the
|
||||
* reference to the content itself and potentially any related metadata.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Tar } from "@std/archive/tar";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
* import { copy } from "@std/io/copy";
|
||||
*
|
||||
* const tar = new Tar();
|
||||
*
|
||||
* // Now that we've created our tar, let's add some files to it:
|
||||
*
|
||||
* const content = new TextEncoder().encode("Some arbitrary content");
|
||||
* await tar.append("deno.txt", {
|
||||
* reader: new Buffer(content),
|
||||
* contentSize: content.byteLength,
|
||||
* });
|
||||
*
|
||||
* // This file is sourced from the filesystem (and renamed in the archive)
|
||||
* await tar.append("filename_in_archive.txt", {
|
||||
* filePath: "./filename_on_filesystem.txt",
|
||||
* });
|
||||
*
|
||||
* // Now let's write the tar (with its two files) to the filesystem
|
||||
* // use tar.getReader() to read the contents.
|
||||
*
|
||||
* const writer = await Deno.open("./out.tar", { write: true, create: true });
|
||||
* await copy(tar.getReader(), writer);
|
||||
* writer.close();
|
||||
* ```
|
||||
*/
|
||||
async append(filenameInArchive: string, source: TarOptions) {
|
||||
if (typeof filenameInArchive !== "string") {
|
||||
throw new Error("Cannot append data: File name is not a string");
|
||||
}
|
||||
let fileName = filenameInArchive;
|
||||
|
||||
/**
|
||||
* Ustar format has a limitation of file name length. Specifically:
|
||||
* 1. File names can contain at most 255 bytes.
|
||||
* 2. File names longer than 100 bytes must be split at a directory separator in two parts,
|
||||
* the first being at most 155 bytes long. So, in most cases file names must be a bit shorter
|
||||
* than 255 bytes.
|
||||
*/
|
||||
// separate file name into two parts if needed
|
||||
let fileNamePrefix: string | undefined;
|
||||
if (fileName.length > 100) {
|
||||
let i = fileName.length;
|
||||
while (i >= 0) {
|
||||
i = fileName.lastIndexOf("/", i);
|
||||
if (i <= 155) {
|
||||
fileNamePrefix = fileName.slice(0, i);
|
||||
fileName = fileName.slice(i + 1);
|
||||
break;
|
||||
}
|
||||
i--;
|
||||
}
|
||||
const errMsg =
|
||||
"Cannot append data: The 'ustar' format does not allow a long file name (length of [file name" +
|
||||
"prefix] + / + [file name] must be shorter than 256 bytes)";
|
||||
if (i < 0 || fileName.length > 100) {
|
||||
throw new Error(errMsg);
|
||||
} else {
|
||||
if (fileNamePrefix === undefined) {
|
||||
throw new TypeError("File name prefix is undefined");
|
||||
}
|
||||
if (fileNamePrefix.length > 155) {
|
||||
throw new Error(errMsg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
source = source ?? {};
|
||||
|
||||
// set meta data
|
||||
let info: Deno.FileInfo | undefined;
|
||||
if (source.filePath) {
|
||||
info = await Deno.stat(source.filePath);
|
||||
if (info.isDirectory) {
|
||||
info.size = 0;
|
||||
source.reader = new Buffer();
|
||||
}
|
||||
}
|
||||
|
||||
const mode = source.fileMode || (info && info.mode) ||
|
||||
parseInt("777", 8) & 0xfff /* 511 */;
|
||||
const mtime = Math.floor(
|
||||
source.mtime ?? (info?.mtime ?? new Date()).valueOf() / 1000,
|
||||
);
|
||||
const uid = source.uid ?? 0;
|
||||
const gid = source.gid ?? 0;
|
||||
|
||||
if (typeof source.owner === "string" && source.owner.length >= 32) {
|
||||
throw new Error(
|
||||
"Cannot append data: The 'ustar' format does not allow owner name length >= 32 bytes",
|
||||
);
|
||||
}
|
||||
if (typeof source.group === "string" && source.group.length >= 32) {
|
||||
throw new Error(
|
||||
"Cannot append data: The 'ustar' format does not allow group name length >= 32 bytes",
|
||||
);
|
||||
}
|
||||
|
||||
const fileSize = info?.size ?? source.contentSize;
|
||||
if (fileSize === undefined) {
|
||||
throw new TypeError("Cannot append data: The file size is not defined");
|
||||
}
|
||||
|
||||
const type = source.type
|
||||
? FileTypes[source.type as keyof typeof FileTypes]
|
||||
: (info?.isDirectory ? FileTypes.directory : FileTypes.file);
|
||||
const tarData: TarDataWithSource = {
|
||||
fileName,
|
||||
fileMode: pad(mode, 7),
|
||||
uid: pad(uid, 7),
|
||||
gid: pad(gid, 7),
|
||||
fileSize: pad(fileSize, 11),
|
||||
mtime: pad(mtime, 11),
|
||||
checksum: " ",
|
||||
type: type.toString(),
|
||||
ustar: USTAR_MAGIC_HEADER,
|
||||
owner: source.owner ?? "",
|
||||
group: source.group ?? "",
|
||||
};
|
||||
if (fileNamePrefix !== undefined) {
|
||||
tarData.fileNamePrefix = fileNamePrefix;
|
||||
}
|
||||
if (source.filePath !== undefined) {
|
||||
tarData.filePath = source.filePath;
|
||||
}
|
||||
if (source.reader !== undefined) {
|
||||
tarData.reader = source.reader;
|
||||
}
|
||||
|
||||
// calculate the checksum
|
||||
let checksum = 0;
|
||||
const encoder = new TextEncoder();
|
||||
Object.keys(tarData)
|
||||
.filter((key): boolean => ["filePath", "reader"].indexOf(key) < 0)
|
||||
.forEach(function (key) {
|
||||
checksum += encoder
|
||||
.encode(tarData[key as keyof TarData])
|
||||
.reduce((p, c): number => p + c, 0);
|
||||
});
|
||||
|
||||
tarData.checksum = pad(checksum, 6) + "\u0000 ";
|
||||
this.#data.push(tarData);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a {@linkcode Reader} instance for this tar archive.
|
||||
*
|
||||
* @returns A reader instance for the tar archive.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Tar } from "@std/archive/tar";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
* import { copy } from "@std/io/copy";
|
||||
*
|
||||
* const tar = new Tar();
|
||||
*
|
||||
* // Now that we've created our tar, let's add some files to it:
|
||||
*
|
||||
* const content = new TextEncoder().encode("Some arbitrary content");
|
||||
* await tar.append("deno.txt", {
|
||||
* reader: new Buffer(content),
|
||||
* contentSize: content.byteLength,
|
||||
* });
|
||||
*
|
||||
* // This file is sourced from the filesystem (and renamed in the archive)
|
||||
* await tar.append("filename_in_archive.txt", {
|
||||
* filePath: "./filename_on_filesystem.txt",
|
||||
* });
|
||||
*
|
||||
* // Now let's write the tar (with its two files) to the filesystem
|
||||
* // use tar.getReader() to read the contents.
|
||||
*
|
||||
* const writer = await Deno.open("./out.tar", { write: true, create: true });
|
||||
* await copy(tar.getReader(), writer);
|
||||
* writer.close();
|
||||
* ```
|
||||
*/
|
||||
getReader(): Reader {
|
||||
const readers: Reader[] = [];
|
||||
this.#data.forEach((tarData) => {
|
||||
let { reader } = tarData;
|
||||
const { filePath } = tarData;
|
||||
const headerArr = formatHeader(tarData);
|
||||
readers.push(new Buffer(headerArr));
|
||||
if (!reader) {
|
||||
if (filePath === undefined) {
|
||||
throw new TypeError(
|
||||
"Cannot get the reader for the tar archive: FilePath is not defined",
|
||||
);
|
||||
}
|
||||
reader = new FileReader(filePath);
|
||||
}
|
||||
readers.push(reader);
|
||||
|
||||
// to the nearest multiple of recordSize
|
||||
if (tarData.fileSize === undefined) {
|
||||
throw new TypeError(
|
||||
"Cannot get the reader for the tar archive: FileSize is not defined",
|
||||
);
|
||||
}
|
||||
readers.push(
|
||||
new Buffer(
|
||||
new Uint8Array(
|
||||
HEADER_LENGTH -
|
||||
(parseInt(tarData.fileSize, 8) % HEADER_LENGTH || HEADER_LENGTH),
|
||||
),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
// append 2 empty records
|
||||
readers.push(new Buffer(new Uint8Array(HEADER_LENGTH * 2)));
|
||||
return new MultiReader(readers);
|
||||
}
|
||||
}
|
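The name-splitting rule that `append()` applies above (a path longer than 100 bytes is split at a `/` so that the prefix fits in 155 bytes and the remaining name in 100 bytes) can be shown in isolation. The helper below is a standalone sketch introduced only for illustration; it mirrors the loop in `append()` rather than reproducing it exactly.

```ts
// Standalone sketch of the ustar name/prefix split described in Tar.append().
// `splitUstarPath` is a hypothetical helper, not part of the module above.
function splitUstarPath(path: string): { prefix?: string; name: string } {
  if (path.length <= 100) return { name: path };
  let i = path.length;
  while (i >= 0) {
    i = path.lastIndexOf("/", i);
    if (i <= 155) break; // prefix now fits in 155 bytes
    i--;
  }
  if (i < 0 || path.length - i - 1 > 100) {
    throw new Error("Path cannot be represented in the ustar format");
  }
  return { prefix: path.slice(0, i), name: path.slice(i + 1) };
}

// "long-file-name/" is 15 characters; repeated 10 times plus "file-name.txt"
// gives a 163-character path, split into a 149-byte prefix and a 13-byte name.
const { prefix, name } = splitUstarPath(
  "long-file-name/".repeat(10) + "file-name.txt",
);
console.assert(prefix?.length === 149 && name === "file-name.txt");
```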
@ -1,124 +0,0 @@
|
||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
/**
|
||||
* Tar test
|
||||
*
|
||||
* **test summary**
|
||||
* - create a tar archive in memory containing output.txt and dir/tar.ts.
|
||||
* - read and extract a tar archive containing output.txt
|
||||
*
|
||||
* **to run this test**
|
||||
* deno run --allow-read archive/tar_test.ts
|
||||
*/
|
||||
import { assert, assertEquals } from "@std/assert";
|
||||
import { resolve } from "@std/path";
|
||||
import { Tar } from "./tar.ts";
|
||||
import { Untar } from "./untar.ts";
|
||||
import { Buffer } from "@std/io/buffer";
|
||||
import { copy } from "@std/io/copy";
|
||||
import { readAll } from "@std/io/read-all";
|
||||
import { filePath, testdataDir } from "./_test_utils.ts";
|
||||
|
||||
Deno.test("createTarArchive", async function () {
|
||||
// initialize
|
||||
const tar = new Tar();
|
||||
|
||||
// put data on memory
|
||||
const content = new TextEncoder().encode("hello tar world!");
|
||||
await tar.append("output.txt", {
|
||||
reader: new Buffer(content),
|
||||
contentSize: content.byteLength,
|
||||
});
|
||||
|
||||
// put a file
|
||||
await tar.append("dir/tar.ts", { filePath });
|
||||
|
||||
// write tar data to a buffer
|
||||
const writer = new Buffer();
|
||||
const wrote = await copy(tar.getReader(), writer);
|
||||
|
||||
/**
|
||||
* 3072 = 512 (header) + 512 (content) + 512 (header) + 512 (content)
|
||||
* + 1024 (footer)
|
||||
*/
|
||||
assertEquals(wrote, 3072);
|
||||
});
|
||||
|
||||
Deno.test("Tar() deflates tar archive", async function () {
|
||||
const fileName = "output.txt";
|
||||
const text = "hello tar world!";
|
||||
|
||||
// create a tar archive
|
||||
const tar = new Tar();
|
||||
const content = new TextEncoder().encode(text);
|
||||
await tar.append(fileName, {
|
||||
reader: new Buffer(content),
|
||||
contentSize: content.byteLength,
|
||||
});
|
||||
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(tar.getReader());
|
||||
const result = await untar.extract();
|
||||
assert(result !== null);
|
||||
const untarText = new TextDecoder("utf-8").decode(await readAll(result));
|
||||
|
||||
assertEquals(await untar.extract(), null); // EOF
|
||||
// tests
|
||||
assertEquals(result.fileName, fileName);
|
||||
assertEquals(untarText, text);
|
||||
});
|
||||
|
||||
Deno.test("Tar() appends file with long name to tar archive", async function (): Promise<
|
||||
void
|
||||
> {
|
||||
// 10 * 15 + 13 = 163 bytes
|
||||
const fileName = "long-file-name/".repeat(10) + "file-name.txt";
|
||||
const text = "hello tar world!";
|
||||
|
||||
// create a tar archive
|
||||
const tar = new Tar();
|
||||
const content = new TextEncoder().encode(text);
|
||||
await tar.append(fileName, {
|
||||
reader: new Buffer(content),
|
||||
contentSize: content.byteLength,
|
||||
});
|
||||
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(tar.getReader());
|
||||
const result = await untar.extract();
|
||||
assert(result !== null);
|
||||
assert(!result.consumed);
|
||||
const untarText = new TextDecoder("utf-8").decode(await readAll(result));
|
||||
assert(result.consumed);
|
||||
|
||||
// tests
|
||||
assertEquals(result.fileName, fileName);
|
||||
assertEquals(untarText, text);
|
||||
});
|
||||
|
||||
Deno.test("Tar() checks directory entry type", async function () {
|
||||
const tar = new Tar();
|
||||
|
||||
await tar.append("directory/", {
|
||||
reader: new Buffer(),
|
||||
contentSize: 0,
|
||||
type: "directory",
|
||||
});
|
||||
|
||||
const filePath = resolve(testdataDir);
|
||||
await tar.append("archive/testdata/", {
|
||||
filePath,
|
||||
});
|
||||
|
||||
const outputFile = resolve(testdataDir, "directory_type_test.tar");
|
||||
using file = await Deno.open(outputFile, { create: true, write: true });
|
||||
await copy(tar.getReader(), file);
|
||||
|
||||
using reader = await Deno.open(outputFile, { read: true });
|
||||
const untar = new Untar(reader);
|
||||
await Array.fromAsync(
|
||||
untar,
|
||||
(entry) => assertEquals(entry.type, "directory"),
|
||||
);
|
||||
|
||||
await Deno.remove(outputFile);
|
||||
});
|
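The 3072-byte figure asserted in the first test follows directly from the record-size accounting that `Tar.getReader()` performs: each entry is a 512-byte header plus its content padded up to the next multiple of 512, and the archive ends with two empty 512-byte records. A small sketch of that arithmetic, using illustrative content sizes, is shown below.

```ts
// Sketch of the record-size accounting behind the `wrote === 3072` assertion.
// The two content sizes are illustrative; each fits in a single 512-byte record.
const RECORD = 512;

function archiveSize(contentSizes: number[]): number {
  const entries = contentSizes.reduce(
    (sum, size) => sum + RECORD + Math.ceil(size / RECORD) * RECORD,
    0,
  );
  return entries + RECORD * 2; // two empty end-of-archive records
}

// 2 * (512 header + 512 padded content) + 1024 footer = 3072
console.assert(archiveSize([16, 300]) === 3072);
```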
BIN
archive/testdata/deno.tar
vendored
Binary file not shown.
1
archive/testdata/example.txt
vendored
@ -1 +0,0 @@
|
||||
hello world!
|
BIN
archive/testdata/with_link.tar
vendored
Binary file not shown.
570
archive/untar.ts
@ -1,570 +0,0 @@
|
||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
|
||||
/*!
|
||||
* Ported and modified from: https://github.com/beatgammit/tar-js and
|
||||
* licensed as:
|
||||
*
|
||||
* (The MIT License)
|
||||
*
|
||||
* Copyright (c) 2011 T. Jameson Little
|
||||
* Copyright (c) 2019 Jun Kato
|
||||
* Copyright (c) 2018-2022 the Deno authors
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
import {
|
||||
FileTypes,
|
||||
HEADER_LENGTH,
|
||||
readBlock,
|
||||
type TarMeta,
|
||||
USTAR_STRUCTURE,
|
||||
type UstarFields,
|
||||
} from "./_common.ts";
|
||||
import { readAll } from "@std/io/read-all";
|
||||
import type { Reader, Seeker } from "@std/io/types";
|
||||
|
||||
export type { Reader, Seeker };
|
||||
|
||||
/**
|
||||
* Extend TarMeta with the `linkName` property so that readers can access
|
||||
* symbolic link values without polluting the world of archive writers.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*/
|
||||
export interface TarMetaWithLinkName extends TarMeta {
|
||||
/** File name of the symbolic link. */
|
||||
linkName?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tar header with raw, unprocessed bytes as values.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*/
|
||||
export type TarHeader = {
|
||||
[key in UstarFields]: Uint8Array;
|
||||
};
|
||||
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13_06
|
||||
// eight checksum bytes taken to be ascii spaces (decimal value 32)
|
||||
const initialChecksum = 8 * 32;
|
||||
|
||||
/**
|
||||
* Truncates a Uint8Array at the first zero (NUL) byte.
*
* @param buffer The Uint8Array to truncate.
* @returns A subarray containing the bytes before the first zero byte, or the
* original buffer if it contains no zero bytes.
|
||||
*/
|
||||
function trim(buffer: Uint8Array): Uint8Array {
|
||||
const index = buffer.indexOf(0);
|
||||
return index === -1 ? buffer : buffer.subarray(0, index);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse file header in a tar archive
|
||||
* @param buffer The header block to parse.
|
||||
*/
|
||||
function parseHeader(buffer: Uint8Array): TarHeader {
|
||||
const data = {} as TarHeader;
|
||||
let offset = 0;
|
||||
USTAR_STRUCTURE.forEach(function (value) {
|
||||
const arr = buffer.subarray(offset, offset + value.length);
|
||||
data[value.field] = arr;
|
||||
offset += value.length;
|
||||
});
|
||||
return data;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tar entry
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { TarEntry } from "@std/archive/untar";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
*
|
||||
* const content = new TextEncoder().encode("hello tar world!");
|
||||
* const reader = new Buffer(content);
|
||||
* const tarMeta = {
|
||||
* fileName: "archive/",
|
||||
* fileSize: 0,
|
||||
* fileMode: 509,
|
||||
* mtime: 1591800767,
|
||||
* uid: 1001,
|
||||
* gid: 1001,
|
||||
* owner: "deno",
|
||||
* group: "deno",
|
||||
* type: "directory",
|
||||
* };
|
||||
* const tarEntry: TarEntry = new TarEntry(tarMeta, reader);
|
||||
* ```
|
||||
*/
|
||||
export interface TarEntry extends TarMetaWithLinkName {}
|
||||
|
||||
/**
|
||||
* Contains tar header metadata and a reader to the entry's body.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { TarEntry } from "@std/archive/untar";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
*
|
||||
* const content = new TextEncoder().encode("hello tar world!");
|
||||
* const reader = new Buffer(content);
|
||||
* const tarMeta = {
|
||||
* fileName: "archive/",
|
||||
* fileSize: 0,
|
||||
* fileMode: 509,
|
||||
* mtime: 1591800767,
|
||||
* uid: 1001,
|
||||
* gid: 1001,
|
||||
* owner: "deno",
|
||||
* group: "deno",
|
||||
* type: "directory",
|
||||
* };
|
||||
* const tarEntry: TarEntry = new TarEntry(tarMeta, reader);
|
||||
* ```
|
||||
*/
|
||||
export class TarEntry implements Reader {
|
||||
#reader: Reader | (Reader & Seeker);
|
||||
#size: number;
|
||||
#read = 0;
|
||||
#consumed = false;
|
||||
#entrySize: number;
|
||||
|
||||
/**
|
||||
* Constructs a new instance.
|
||||
*
|
||||
* @param meta The metadata of the entry.
|
||||
* @param reader The reader to read the entry from.
|
||||
*/
|
||||
constructor(
|
||||
meta: TarMetaWithLinkName,
|
||||
reader: Reader | (Reader & Seeker),
|
||||
) {
|
||||
Object.assign(this, meta);
|
||||
this.#reader = reader;
|
||||
|
||||
// File Size
|
||||
this.#size = this.fileSize ?? 0;
|
||||
// Entry Size
|
||||
const blocks = Math.ceil(this.#size / HEADER_LENGTH);
|
||||
this.#entrySize = blocks * HEADER_LENGTH;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the entry has already been consumed.
|
||||
*
|
||||
* @returns Whether the entry has already been consumed.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { TarEntry } from "@std/archive/untar";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
* import { assertEquals } from "@std/assert/equals";
|
||||
*
|
||||
* const content = new TextEncoder().encode("hello tar world!");
|
||||
* const reader = new Buffer(content);
|
||||
* const tarMeta = {
|
||||
* fileName: "archive/",
|
||||
* fileSize: 0,
|
||||
* fileMode: 509,
|
||||
* mtime: 1591800767,
|
||||
* uid: 1001,
|
||||
* gid: 1001,
|
||||
* owner: "deno",
|
||||
* group: "deno",
|
||||
* type: "directory",
|
||||
* };
|
||||
* const tarEntry: TarEntry = new TarEntry(tarMeta, reader);
|
||||
*
|
||||
* assertEquals(tarEntry.consumed, false);
|
||||
* ```
|
||||
*/
|
||||
get consumed(): boolean {
|
||||
return this.#consumed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reads up to `p.byteLength` bytes of the tar entry into `p`. It resolves to
|
||||
* the number of bytes read (`0 < n <= p.byteLength`) and rejects if any
|
||||
* error encountered. Even if read() resolves to n < p.byteLength, it may use
|
||||
* all of `p` as scratch space during the call. If some data is available but
|
||||
* not `p.byteLength` bytes, read() conventionally resolves to what is available
|
||||
* instead of waiting for more.
|
||||
*
|
||||
* @param p The buffer to read the entry into.
|
||||
* @returns The number of bytes read (`0 < n <= p.byteLength`) or `null` if
|
||||
* there are no more bytes to read.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Tar, Untar } from "@std/archive";
|
||||
* import { assertEquals } from "@std/assert/equals";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
*
|
||||
* const content = new TextEncoder().encode("hello tar world!");
|
||||
*
|
||||
* const tar = new Tar();
|
||||
* tar.append("test.txt", {
|
||||
* reader: new Buffer(content),
|
||||
* contentSize: content.byteLength,
|
||||
* });
|
||||
*
|
||||
* const untar = new Untar(tar.getReader());
|
||||
* const entry = await untar.extract();
|
||||
* const buffer = new Uint8Array(1024);
|
||||
* const n = await entry!.read(buffer);
|
||||
*
|
||||
* assertEquals(buffer.subarray(0, n!), content);
|
||||
* ```
|
||||
*/
|
||||
async read(p: Uint8Array): Promise<number | null> {
|
||||
// Bytes left for entry
|
||||
const entryBytesLeft = this.#entrySize - this.#read;
|
||||
const bufSize = Math.min(
|
||||
// bufSize can't be greater than p.length nor bytes left in the entry
|
||||
p.length,
|
||||
entryBytesLeft,
|
||||
);
|
||||
|
||||
if (entryBytesLeft <= 0) {
|
||||
this.#consumed = true;
|
||||
return null;
|
||||
}
|
||||
|
||||
const block = new Uint8Array(bufSize);
|
||||
const n = await readBlock(this.#reader, block);
|
||||
const bytesLeft = this.#size - this.#read;
|
||||
|
||||
this.#read += n ?? 0;
|
||||
if (n === null || bytesLeft <= 0) {
|
||||
if (n === null) this.#consumed = true;
|
||||
return null;
|
||||
}
|
||||
|
||||
// Remove zero filled
|
||||
const offset = bytesLeft < n ? bytesLeft : n;
|
||||
p.set(block.subarray(0, offset), 0);
|
||||
|
||||
return offset < 0 ? n - Math.abs(offset) : offset;
|
||||
}
|
||||
|
||||
/**
|
||||
* Discards the current entry.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
* import { TarEntry } from "@std/archive/untar";
|
||||
* import { assertEquals } from "@std/assert/equals";
|
||||
*
|
||||
* const text = "Hello, world!";
|
||||
*
|
||||
* const reader = new Buffer(new TextEncoder().encode(text));
|
||||
* const tarMeta = {
|
||||
* fileName: "text",
|
||||
* fileSize: 0,
|
||||
* fileMode: 509,
|
||||
* mtime: 1591800767,
|
||||
* uid: 1001,
|
||||
* gid: 1001,
|
||||
* owner: "deno",
|
||||
* group: "deno",
|
||||
* type: "file",
|
||||
* };
|
||||
*
|
||||
* const tarEntry: TarEntry = new TarEntry(tarMeta, reader);
|
||||
* await tarEntry.discard();
|
||||
*
|
||||
* assertEquals(tarEntry.consumed, true);
|
||||
* ```
|
||||
*/
|
||||
async discard() {
|
||||
// Discard current entry
|
||||
if (this.#consumed) return;
|
||||
this.#consumed = true;
|
||||
|
||||
if (typeof (this.#reader as Seeker).seek === "function") {
|
||||
await (this.#reader as Seeker).seek(
|
||||
this.#entrySize - this.#read,
|
||||
Deno.SeekMode.Current,
|
||||
);
|
||||
this.#read = this.#entrySize;
|
||||
} else {
|
||||
await readAll(this);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* ### Overview
|
||||
* A class to extract from a tar archive. Tar archives allow for storing multiple
|
||||
* files in a single file (called an archive, or sometimes a tarball). These
|
||||
* archives typically have the '.tar' extension.
|
||||
*
|
||||
* @deprecated Use {@linkcode https://jsr.io/@std/tar | @std/tar} instead.
|
||||
* `@std/archive` will be removed in the future.
|
||||
*
|
||||
* ### Supported file formats
|
||||
* Only the ustar file format is supported. This is the most common format. The
|
||||
* pax file format may also be read, but additional features, such as longer
|
||||
* filenames, may be ignored.
|
||||
*
|
||||
* ### Usage
|
||||
* The workflow is to create a Untar instance referencing the source of the tar file.
|
||||
* You can then use the untar reference to extract files one at a time. See the worked
|
||||
* example below for details.
|
||||
*
|
||||
* ### Understanding compression
|
||||
* A tar archive may be compressed, often identified by the `.tar.gz` extension.
|
||||
* This utility does not support decompression which must be done before extracting
|
||||
* the files.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Untar } from "@std/archive/untar";
|
||||
* import { ensureFile } from "@std/fs/ensure-file";
|
||||
* import { ensureDir } from "@std/fs/ensure-dir";
|
||||
* import { copy } from "@std/io/copy";
|
||||
*
|
||||
* using reader = await Deno.open("./out.tar", { read: true });
|
||||
* const untar = new Untar(reader);
|
||||
*
|
||||
* for await (const entry of untar) {
|
||||
* console.log(entry); // metadata
|
||||
*
|
||||
* if (entry.type === "directory") {
|
||||
* await ensureDir(entry.fileName);
|
||||
* continue;
|
||||
* }
|
||||
*
|
||||
* await ensureFile(entry.fileName);
|
||||
* using file = await Deno.open(entry.fileName, { write: true });
|
||||
* // <entry> is a reader.
|
||||
* await copy(entry, file);
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* @experimental **UNSTABLE**: New API, yet to be vetted.
|
||||
*/
|
||||
export class Untar {
|
||||
/** Internal reader. */
|
||||
#reader: Reader;
|
||||
/** Internal block. */
|
||||
#block: Uint8Array;
|
||||
#entry: TarEntry | undefined;
|
||||
|
||||
/**
|
||||
* Constructs a new instance.
|
||||
*
|
||||
* @param reader The reader to extract from.
|
||||
*/
|
||||
constructor(reader: Reader) {
|
||||
this.#reader = reader;
|
||||
this.#block = new Uint8Array(HEADER_LENGTH);
|
||||
}
|
||||
|
||||
#checksum(header: Uint8Array): number {
|
||||
let sum = initialChecksum;
|
||||
for (let i = 0; i < HEADER_LENGTH; i++) {
|
||||
if (i >= 148 && i < 156) {
|
||||
// Ignore checksum header
|
||||
continue;
|
||||
}
|
||||
sum += header[i]!;
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
|
||||
async #getAndValidateHeader(): Promise<TarHeader | null> {
|
||||
await readBlock(this.#reader, this.#block);
|
||||
const header = parseHeader(this.#block);
|
||||
|
||||
// calculate the checksum
|
||||
const decoder = new TextDecoder();
|
||||
const checksum = this.#checksum(this.#block);
|
||||
|
||||
if (parseInt(decoder.decode(header.checksum), 8) !== checksum) {
|
||||
if (checksum === initialChecksum) {
|
||||
// EOF
|
||||
return null;
|
||||
}
|
||||
throw new Error("Cannot validate checksum");
|
||||
}
|
||||
|
||||
const magic = decoder.decode(header.ustar);
|
||||
|
||||
if (magic.indexOf("ustar")) {
|
||||
throw new Error(
|
||||
`Cannot validate the header as it has unsupported archive format: ${magic}`,
|
||||
);
|
||||
}
|
||||
|
||||
return header;
|
||||
}
|
||||
|
||||
#getMetadata(header: TarHeader): TarMetaWithLinkName {
|
||||
const decoder = new TextDecoder();
|
||||
// get meta data
|
||||
const meta: TarMetaWithLinkName = {
|
||||
fileName: decoder.decode(trim(header.fileName)),
|
||||
};
|
||||
const fileNamePrefix = trim(header.fileNamePrefix);
|
||||
if (fileNamePrefix.byteLength > 0) {
|
||||
meta.fileName = decoder.decode(fileNamePrefix) + "/" + meta.fileName;
|
||||
}
|
||||
(["fileMode", "mtime", "uid", "gid"] as const)
|
||||
.forEach((key) => {
|
||||
const arr = trim(header[key]);
|
||||
if (arr.byteLength > 0) {
|
||||
meta[key] = parseInt(decoder.decode(arr), 8);
|
||||
}
|
||||
});
|
||||
(["owner", "group", "type"] as const)
|
||||
.forEach((key) => {
|
||||
const arr = trim(header[key]);
|
||||
if (arr.byteLength > 0) {
|
||||
meta[key] = decoder.decode(arr);
|
||||
}
|
||||
});
|
||||
|
||||
meta.fileSize = parseInt(decoder.decode(header.fileSize), 8);
|
||||
if (meta.type !== undefined) {
|
||||
meta.type = FileTypes[parseInt(meta.type!)] ?? meta.type;
|
||||
}
|
||||
|
||||
// Only create the `linkName` property for symbolic links to minimize
|
||||
// the effect on existing code that only deals with non-links.
|
||||
if (meta.type === "symlink") {
|
||||
meta.linkName = decoder.decode(trim(header.linkName));
|
||||
}
|
||||
|
||||
return meta;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the next entry of the tar archive.
|
||||
*
|
||||
* @returns A TarEntry with header metadata and a reader to the entry's body,
|
||||
* or null if there are no more entries to extract.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Tar, Untar } from "@std/archive";
|
||||
* import { Buffer } from "@std/io/buffer";
|
||||
* import { readAll } from "@std/io/read-all";
|
||||
* import { assertEquals, assertNotEquals } from "@std/assert";
|
||||
*
|
||||
* const content = new TextEncoder().encode("hello tar world!");
|
||||
*
|
||||
* // Create a tar archive
|
||||
* const tar = new Tar();
|
||||
* await tar.append("output.txt", {
|
||||
* reader: new Buffer(content),
|
||||
* contentSize: content.byteLength,
|
||||
* });
|
||||
*
|
||||
* // Read data from a tar archive
|
||||
* const untar = new Untar(tar.getReader());
|
||||
* const result = await untar.extract();
|
||||
*
|
||||
* assertNotEquals(result, null);
|
||||
* assertEquals(result!.fileName, "output.txt");
|
||||
* assertEquals(result!.fileSize, content.byteLength);
|
||||
* assertEquals(result!.type, "file");
|
||||
* assertEquals(await readAll(result!), content);
|
||||
* ```
|
||||
*/
|
||||
async extract(): Promise<TarEntry | null> {
|
||||
if (this.#entry && !this.#entry.consumed) {
|
||||
// If entry body was not read, discard the body
|
||||
// so we can read the next entry.
|
||||
await this.#entry.discard();
|
||||
}
|
||||
|
||||
const header = await this.#getAndValidateHeader();
|
||||
if (header === null) return null;
|
||||
|
||||
const meta = this.#getMetadata(header);
|
||||
|
||||
this.#entry = new TarEntry(meta, this.#reader);
|
||||
|
||||
return this.#entry;
|
||||
}
|
||||
|
||||
/**
|
||||
* Iterate over all entries of the tar archive.
|
||||
*
|
||||
* @yields A TarEntry with tar header metadata and a reader to the entry's body.
|
||||
* @returns An async iterator.
|
||||
*
|
||||
* @example Usage
|
||||
* ```ts ignore
|
||||
* import { Untar } from "@std/archive/untar";
|
||||
* import { ensureFile } from "@std/fs/ensure-file";
|
||||
* import { ensureDir } from "@std/fs/ensure-dir";
|
||||
* import { copy } from "@std/io/copy";
|
||||
*
|
||||
* using reader = await Deno.open("./out.tar", { read: true });
|
||||
* const untar = new Untar(reader);
|
||||
*
|
||||
* for await (const entry of untar) {
|
||||
* console.log(entry); // metadata
|
||||
*
|
||||
* if (entry.type === "directory") {
|
||||
* await ensureDir(entry.fileName);
|
||||
* continue;
|
||||
* }
|
||||
*
|
||||
* await ensureFile(entry.fileName);
|
||||
* using file = await Deno.open(entry.fileName, { write: true });
|
||||
* // <entry> is a reader.
|
||||
* await copy(entry, file);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
async *[Symbol.asyncIterator](): AsyncIterableIterator<TarEntry> {
|
||||
while (true) {
|
||||
const entry = await this.extract();
|
||||
|
||||
if (entry === null) return;
|
||||
|
||||
yield entry;
|
||||
}
|
||||
}
|
||||
}
|
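The header validation in `Untar` above hinges on the ustar checksum rule: sum every byte of the 512-byte header while treating the eight checksum bytes (offsets 148-155) as ASCII spaces. A self-contained sketch of that rule, written independently of the class internals, is shown below.

```ts
// Standalone sketch of the ustar header checksum used by Untar above.
// `ustarChecksum` is a hypothetical helper for illustration only.
function ustarChecksum(header: Uint8Array): number {
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    // The checksum field itself (offsets 148-155) counts as ASCII spaces.
    sum += i >= 148 && i < 156 ? 32 : header[i]!;
  }
  return sum;
}

// An all-zero block sums to 8 * 32 = 256, which is exactly the
// `initialChecksum` early-return Untar uses to detect the end-of-archive
// records.
console.assert(ustarChecksum(new Uint8Array(512)) === 8 * 32);
```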
@ -1,386 +0,0 @@
|
||||
// Copyright 2018-2024 the Deno authors. All rights reserved. MIT license.
|
||||
import { assert, assertEquals, assertExists } from "@std/assert";
|
||||
import { resolve } from "@std/path";
|
||||
import { Tar, type TarMeta } from "./tar.ts";
|
||||
import { TarEntry, type TarMetaWithLinkName, Untar } from "./untar.ts";
|
||||
import { Buffer } from "@std/io/buffer";
|
||||
import { copy } from "@std/io/copy";
|
||||
import { readAll } from "@std/io/read-all";
|
||||
import { filePath, testdataDir } from "./_test_utils.ts";
|
||||
|
||||
interface TestEntry {
|
||||
name: string;
|
||||
content?: Uint8Array;
|
||||
filePath?: string;
|
||||
}
|
||||
|
||||
async function createTar(entries: TestEntry[]): Promise<Tar> {
|
||||
const tar = new Tar();
|
||||
// put data on memory
|
||||
for (const file of entries) {
|
||||
let options;
|
||||
|
||||
if (file.content) {
|
||||
options = {
|
||||
reader: new Buffer(file.content),
|
||||
contentSize: file.content.byteLength,
|
||||
};
|
||||
} else {
|
||||
options = { filePath: file.filePath! };
|
||||
}
|
||||
|
||||
await tar.append(file.name, options);
|
||||
}
|
||||
|
||||
return tar;
|
||||
}
|
||||
|
||||
Deno.test("Untar() works as an async iterator", async () => {
|
||||
const entries: TestEntry[] = [
|
||||
{
|
||||
name: "output.txt",
|
||||
content: new TextEncoder().encode("hello tar world!"),
|
||||
},
|
||||
{
|
||||
name: "dir/tar.ts",
|
||||
filePath,
|
||||
},
|
||||
];
|
||||
|
||||
const tar = await createTar(entries);
|
||||
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(tar.getReader());
|
||||
|
||||
let lastEntry;
|
||||
for await (const entry of untar) {
|
||||
const expected = entries.shift();
|
||||
assert(expected);
|
||||
|
||||
let content = expected.content;
|
||||
if (expected.filePath) {
|
||||
content = await Deno.readFile(expected.filePath);
|
||||
}
|
||||
assertEquals(content, await readAll(entry));
|
||||
assertEquals(expected.name, entry.fileName);
|
||||
|
||||
if (lastEntry) assert(lastEntry.consumed);
|
||||
lastEntry = entry;
|
||||
}
|
||||
assert(lastEntry);
|
||||
assert(lastEntry.consumed);
|
||||
assertEquals(entries.length, 0);
|
||||
});
|
||||
|
||||
Deno.test("Untar() reads without body", async () => {
|
||||
const entries: TestEntry[] = [
|
||||
{
|
||||
name: "output.txt",
|
||||
content: new TextEncoder().encode("hello tar world!"),
|
||||
},
|
||||
{
|
||||
name: "dir/tar.ts",
|
||||
filePath,
|
||||
},
|
||||
];
|
||||
|
||||
const tar = await createTar(entries);
|
||||
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(tar.getReader());
|
||||
|
||||
for await (const entry of untar) {
|
||||
const expected = entries.shift();
|
||||
assert(expected);
|
||||
assertEquals(expected.name, entry.fileName);
|
||||
}
|
||||
|
||||
assertEquals(entries.length, 0);
|
||||
});
|
||||
|
||||
Deno.test(
|
||||
"Untar() reads without body from FileReader",
|
||||
async () => {
|
||||
const entries: TestEntry[] = [
|
||||
{
|
||||
name: "output.txt",
|
||||
content: new TextEncoder().encode("hello tar world!"),
|
||||
},
|
||||
{
|
||||
name: "dir/tar.ts",
|
||||
filePath,
|
||||
},
|
||||
];
|
||||
|
||||
const outputFile = resolve(testdataDir, "test.tar");
|
||||
|
||||
const tar = await createTar(entries);
|
||||
using file = await Deno.open(outputFile, { create: true, write: true });
|
||||
await copy(tar.getReader(), file);
|
||||
|
||||
using reader = await Deno.open(outputFile, { read: true });
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(reader);
|
||||
|
||||
for await (const entry of untar) {
|
||||
const expected = entries.shift();
|
||||
assert(expected);
|
||||
assertEquals(expected.name, entry.fileName);
|
||||
}
|
||||
|
||||
await Deno.remove(outputFile);
|
||||
assertEquals(entries.length, 0);
|
||||
},
|
||||
);
|
||||
|
||||
Deno.test("Untar() reads from FileReader", async () => {
|
||||
const entries: TestEntry[] = [
|
||||
{
|
||||
name: "output.txt",
|
||||
content: new TextEncoder().encode("hello tar world!"),
|
||||
},
|
||||
{
|
||||
name: "dir/tar.ts",
|
||||
filePath,
|
||||
},
|
||||
];
|
||||
|
||||
const outputFile = resolve(testdataDir, "test.tar");
|
||||
|
||||
const tar = await createTar(entries);
|
||||
using file = await Deno.open(outputFile, { create: true, write: true });
|
||||
await copy(tar.getReader(), file);
|
||||
|
||||
using reader = await Deno.open(outputFile, { read: true });
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(reader);
|
||||
|
||||
for await (const entry of untar) {
|
||||
const expected = entries.shift();
|
||||
assert(expected);
|
||||
|
||||
let content = expected.content;
|
||||
if (expected.filePath) {
|
||||
content = await Deno.readFile(expected.filePath);
|
||||
}
|
||||
|
||||
assertEquals(content, await readAll(entry));
|
||||
assertEquals(expected.name, entry.fileName);
|
||||
}
|
||||
|
||||
await Deno.remove(outputFile);
|
||||
assertEquals(entries.length, 0);
|
||||
});
|
||||
|
||||
Deno.test(
|
||||
"Untar() reads less than record size",
|
||||
async () => {
|
||||
// record size is 512
|
||||
const bufSizes = [1, 53, 256, 511];
|
||||
|
||||
for (const bufSize of bufSizes) {
|
||||
const entries: TestEntry[] = [
|
||||
{
|
||||
name: "output.txt",
|
||||
content: new TextEncoder().encode("hello tar world!".repeat(100)),
|
||||
},
|
||||
// Test at least two files to make sure the first entry doesn't over-read,
// causing the next one to fail with a checksum error.
|
||||
{
|
||||
name: "deni.txt",
|
||||
content: new TextEncoder().encode("deno!".repeat(250)),
|
||||
},
|
||||
];
|
||||
|
||||
const tar = await createTar(entries);
|
||||
|
||||
// read data from a tar archive
|
||||
const untar = new Untar(tar.getReader());
|
||||
|
||||
for await (const entry of untar) {
|
||||
const expected = entries.shift();
|
||||
assert(expected);
|
||||
assertEquals(expected.name, entry.fileName);
|
||||
|
||||
const writer = new Buffer();
|
||||
while (true) {
|
||||
const buf = new Uint8Array(bufSize);
|
||||
const n = await entry.read(buf);
|
||||
if (n === null) break;
|
||||
|
||||
await writer.write(buf.subarray(0, n));
|
||||
}
|
||||
assertEquals(writer.bytes(), expected!.content);
|
||||
}
|
||||
|
||||
assertEquals(entries.length, 0);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
Deno.test("Untar() works with Linux generated tar", async () => {
|
||||
const filePath = resolve(testdataDir, "deno.tar");
|
||||
using file = await Deno.open(filePath, { read: true });
|
||||
|
||||
type ExpectedEntry = TarMeta & { content?: Uint8Array };
|
||||
|
||||
const expectedEntries: ExpectedEntry[] = [
|
||||
{
|
||||
fileName: "archive/",
|
||||
fileSize: 0,
|
||||
fileMode: 509,
|
||||
mtime: 1591800767,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "directory",
|
||||
},
|
||||
{
|
||||
fileName: "archive/deno/",
|
||||
fileSize: 0,
|
||||
fileMode: 509,
|
||||
mtime: 1591799635,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "directory",
|
||||
},
|
||||
{
|
||||
fileName: "archive/deno/land/",
|
||||
fileSize: 0,
|
||||
fileMode: 509,
|
||||
mtime: 1591799660,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "directory",
|
||||
},
|
||||
{
|
||||
fileName: "archive/deno/land/land.txt",
|
||||
fileMode: 436,
|
||||
fileSize: 5,
|
||||
mtime: 1591799660,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "file",
|
||||
content: new TextEncoder().encode("land\n"),
|
||||
},
|
||||
{
|
||||
fileName: "archive/file.txt",
|
||||
fileMode: 436,
|
||||
fileSize: 5,
|
||||
mtime: 1591799626,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "file",
|
||||
content: new TextEncoder().encode("file\n"),
|
||||
},
|
||||
{
|
||||
fileName: "archive/deno.txt",
|
||||
fileMode: 436,
|
||||
fileSize: 5,
|
||||
mtime: 1591799642,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "file",
|
||||
content: new TextEncoder().encode("deno\n"),
|
||||
},
|
||||
];
|
||||
|
||||
const untar = new Untar(file);
|
||||
|
||||
for await (const entry of untar) {
|
||||
const expected = expectedEntries.shift();
|
||||
assert(expected);
|
||||
const content = expected.content;
|
||||
delete expected.content;
|
||||
|
||||
assertEquals({ ...entry }, expected);
|
||||
|
||||
if (content) {
|
||||
assertEquals(content, await readAll(entry));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Deno.test({
|
||||
name: "TarEntry() handles meta",
|
||||
// only: true,
|
||||
fn() {
|
||||
// test TarEntry class
|
||||
assertExists(TarEntry);
|
||||
const content = new TextEncoder().encode("hello tar world!");
|
||||
const reader = new Buffer(content);
|
||||
const tarMeta = {
|
||||
fileName: "archive/",
|
||||
fileSize: 0,
|
||||
fileMode: 509,
|
||||
mtime: 1591800767,
|
||||
uid: 1001,
|
||||
gid: 1001,
|
||||
owner: "deno",
|
||||
group: "deno",
|
||||
type: "directory",
|
||||
};
|
||||
const tarEntry: TarEntry = new TarEntry(tarMeta, reader);
|
||||
assertExists(tarEntry);
|
||||
},
|
||||
});
|
||||
|
||||
Deno.test("Untar() handles archive with link", async function () {
|
||||
const filePath = resolve(testdataDir, "with_link.tar");
|
||||
using file = await Deno.open(filePath, { read: true });
|
||||
|
||||
type ExpectedEntry = TarMetaWithLinkName & { content?: Uint8Array };
|
||||
|
||||
const expectedEntries: ExpectedEntry[] = [
|
||||
{
|
||||
fileName: "hello.txt",
|
||||
fileMode: 436,
|
||||
fileSize: 14,
|
||||
mtime: 1696384910,
|
||||
uid: 1000,
|
||||
gid: 1000,
|
||||
owner: "user",
|
||||
group: "user",
|
||||
type: "file",
|
||||
content: new TextEncoder().encode("Hello World!\n\n"),
|
||||
},
|
||||
{
|
||||
fileName: "link_to_hello.txt",
|
||||
linkName: "./hello.txt",
|
||||
fileMode: 511,
|
||||
fileSize: 0,
|
||||
mtime: 1696384945,
|
||||
uid: 1000,
|
||||
gid: 1000,
|
||||
owner: "user",
|
||||
group: "user",
|
||||
type: "symlink",
|
||||
},
|
||||
];
|
||||
|
||||
const untar = new Untar(file);
|
||||
|
||||
for await (const entry of untar) {
|
||||
const expected = expectedEntries.shift();
|
||||
assert(expected);
|
||||
const content = expected.content;
|
||||
delete expected.content;
|
||||
|
||||
assertEquals({ ...entry }, expected);
|
||||
|
||||
if (content) {
|
||||
assertEquals(content, await readAll(entry));
|
||||
}
|
||||
}
|
||||
});
|
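As the "Understanding compression" note in `untar.ts` above says, `Untar` never decompresses; a gzipped archive has to be inflated to a plain `.tar` first. A minimal sketch using the built-in `DecompressionStream`, with placeholder file names, follows.

```ts
// Minimal sketch: inflate a .tar.gz to a plain .tar before handing it to
// Untar. File names are placeholders.
const gz = await Deno.open("./out.tar.gz", { read: true });
const tar = await Deno.create("./out.tar");
await gz.readable
  .pipeThrough(new DecompressionStream("gzip"))
  .pipeTo(tar.writable);
// "./out.tar" can now be opened and passed to `new Untar(reader)` as in the
// examples above.
```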
@ -8,7 +8,6 @@
|
||||
},
|
||||
"importMap": "./import_map.json",
|
||||
"workspace": [
|
||||
"./archive",
|
||||
"./assert",
|
||||
"./async",
|
||||
"./bytes",
|
||||
|
@ -50,7 +50,6 @@
|
||||
}
|
||||
},
|
||||
"workspace": [
|
||||
"./archive",
|
||||
"./assert",
|
||||
"./async",
|
||||
"./bytes",
|
||||
|
@ -6,7 +6,6 @@
|
||||
"automation/": "https://raw.githubusercontent.com/denoland/automation/0.10.0/",
|
||||
"graphviz": "npm:node-graphviz@^0.1.1",
|
||||
|
||||
"@std/archive": "jsr:@std/archive@^0.225.4",
|
||||
"@std/assert": "jsr:@std/assert@^1.0.8",
|
||||
"@std/async": "jsr:@std/async@^1.0.8",
|
||||
"@std/bytes": "jsr:@std/bytes@^1.0.4",
|
||||
|