Skip to content

Commit 32c418c

Browse files
committed
serve rustdoc-archive-download ourselves, remove public/private feature from storage
1 parent 1b9214b commit 32c418c

File tree

11 files changed

+107
-261
lines changed

11 files changed

+107
-261
lines changed
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
ALTER TABLE files ADD COLUMN public BOOL NOT NULL DEFAULT FALSE;
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
ALTER TABLE files DROP COLUMN public;

src/config.rs

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -45,11 +45,6 @@ pub struct Config {
4545
#[builder(default)]
4646
pub(crate) s3_bucket_is_temporary: bool,
4747

48-
// CloudFront domain which we can access
49-
// public S3 files through
50-
#[cfg_attr(test, builder(setter(into)))]
51-
pub(crate) s3_static_root_path: String,
52-
5348
// Github authentication
5449
pub(crate) github_accesstoken: Option<String>,
5550
pub(crate) github_updater_min_rate_limit: u32,
@@ -209,10 +204,6 @@ impl Config {
209204
.s3_bucket(env("DOCSRS_S3_BUCKET", "rust-docs-rs".to_string())?)
210205
.s3_region(env("S3_REGION", "us-west-1".to_string())?)
211206
.s3_endpoint(maybe_env("S3_ENDPOINT")?)
212-
.s3_static_root_path(env(
213-
"DOCSRS_S3_STATIC_ROOT_PATH",
214-
"https://static.docs.rs".to_string(),
215-
)?)
216207
.github_accesstoken(maybe_env("DOCSRS_GITHUB_ACCESSTOKEN")?)
217208
.github_updater_min_rate_limit(env("DOCSRS_GITHUB_UPDATER_MIN_RATE_LIMIT", 2500u32)?)
218209
.gitlab_accesstoken(maybe_env("DOCSRS_GITLAB_ACCESSTOKEN")?)

src/db/file.rs

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -80,14 +80,10 @@ pub async fn add_path_into_remote_archive<P: AsRef<Path> + std::fmt::Debug>(
8080
storage: &AsyncStorage,
8181
archive_path: &str,
8282
path: P,
83-
public_access: bool,
8483
) -> Result<(Vec<FileEntry>, CompressionAlgorithm)> {
8584
let (file_list, algorithm) = storage
8685
.store_all_in_archive(archive_path, path.as_ref())
8786
.await?;
88-
if public_access {
89-
storage.set_public_access(archive_path, true).await?;
90-
}
9187
Ok((file_list, algorithm))
9288
}
9389

src/docbuilder/rustwide_builder.rs

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -680,7 +680,6 @@ impl RustwideBuilder {
680680
&self.async_storage,
681681
&source_archive_path(name, version),
682682
build.host_source_dir(),
683-
false,
684683
))?;
685684
algs.insert(new_alg);
686685
files_list
@@ -776,7 +775,6 @@ impl RustwideBuilder {
776775
&self.async_storage,
777776
&rustdoc_archive_path(name, version),
778777
local_storage.path(),
779-
true,
780778
))?;
781779
let documentation_size = file_list.iter().map(|info| info.size).sum::<u64>();
782780
self.metrics
@@ -1077,7 +1075,6 @@ impl RustwideBuilder {
10771075

10781076
self.storage
10791077
.store_one_uncompressed(&path, compressed_json.clone())?;
1080-
self.storage.set_public_access(&path, true)?;
10811078
}
10821079
}
10831080

@@ -1641,7 +1638,6 @@ mod tests {
16411638
Some(*alg),
16421639
);
16431640
assert!(storage.exists(&path)?);
1644-
assert!(storage.get_public_access(&path)?);
16451641

16461642
let ext = compression::file_extension_for(*alg);
16471643

src/storage/database.rs

Lines changed: 0 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -33,40 +33,6 @@ impl DatabaseBackend {
3333
.await?)
3434
}
3535

36-
pub(super) async fn get_public_access(&self, path: &str) -> Result<bool> {
37-
match sqlx::query_scalar!(
38-
"SELECT public
39-
FROM files
40-
WHERE path = $1",
41-
path
42-
)
43-
.fetch_optional(&self.pool)
44-
.await?
45-
{
46-
Some(public) => Ok(public),
47-
None => Err(super::PathNotFoundError.into()),
48-
}
49-
}
50-
51-
pub(super) async fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
52-
if sqlx::query!(
53-
"UPDATE files
54-
SET public = $2
55-
WHERE path = $1",
56-
path,
57-
public,
58-
)
59-
.execute(&self.pool)
60-
.await?
61-
.rows_affected()
62-
== 1
63-
{
64-
Ok(())
65-
} else {
66-
Err(super::PathNotFoundError.into())
67-
}
68-
}
69-
7036
pub(super) async fn get_stream(
7137
&self,
7238
path: &str,

src/storage/mod.rs

Lines changed: 0 additions & 66 deletions
Original file line numberDiff line numberDiff line change
@@ -293,22 +293,6 @@ impl AsyncStorage {
293293
}
294294
}
295295

296-
#[instrument]
297-
pub(crate) async fn get_public_access(&self, path: &str) -> Result<bool> {
298-
match &self.backend {
299-
StorageBackend::Database(db) => db.get_public_access(path).await,
300-
StorageBackend::S3(s3) => s3.get_public_access(path).await,
301-
}
302-
}
303-
304-
#[instrument]
305-
pub(crate) async fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
306-
match &self.backend {
307-
StorageBackend::Database(db) => db.set_public_access(path, public).await,
308-
StorageBackend::S3(s3) => s3.set_public_access(path, public).await,
309-
}
310-
}
311-
312296
/// Fetch a rustdoc file from our blob storage.
313297
/// * `name` - the crate name
314298
/// * `version` - the crate version
@@ -935,15 +919,6 @@ impl Storage {
935919
self.runtime.block_on(self.inner.exists(path))
936920
}
937921

938-
pub(crate) fn get_public_access(&self, path: &str) -> Result<bool> {
939-
self.runtime.block_on(self.inner.get_public_access(path))
940-
}
941-
942-
pub(crate) fn set_public_access(&self, path: &str, public: bool) -> Result<()> {
943-
self.runtime
944-
.block_on(self.inner.set_public_access(path, public))
945-
}
946-
947922
pub(crate) fn fetch_source_file(
948923
&self,
949924
name: &str,
@@ -1546,35 +1521,6 @@ mod backend_tests {
15461521
Ok(())
15471522
}
15481523

1549-
fn test_set_public(storage: &Storage) -> Result<()> {
1550-
let path: &str = "foo/bar.txt";
1551-
1552-
storage.store_blobs(vec![BlobUpload {
1553-
path: path.into(),
1554-
mime: mime::TEXT_PLAIN,
1555-
compression: None,
1556-
content: b"test content\n".to_vec(),
1557-
}])?;
1558-
1559-
assert!(!storage.get_public_access(path)?);
1560-
storage.set_public_access(path, true)?;
1561-
assert!(storage.get_public_access(path)?);
1562-
storage.set_public_access(path, false)?;
1563-
assert!(!storage.get_public_access(path)?);
1564-
1565-
for path in &["bar.txt", "baz.txt", "foo/baz.txt"] {
1566-
assert!(
1567-
storage
1568-
.set_public_access(path, true)
1569-
.unwrap_err()
1570-
.downcast_ref::<PathNotFoundError>()
1571-
.is_some()
1572-
);
1573-
}
1574-
1575-
Ok(())
1576-
}
1577-
15781524
fn test_get_object(storage: &Storage) -> Result<()> {
15791525
let path: &str = "foo/bar.txt";
15801526
let blob = BlobUpload {
@@ -1593,9 +1539,6 @@ mod backend_tests {
15931539
// it seems like minio does it too :)
15941540
assert_eq!(found.etag, Some(compute_etag(&blob.content)));
15951541

1596-
// default visibility is private
1597-
assert!(!storage.get_public_access(path)?);
1598-
15991542
for path in &["bar.txt", "baz.txt", "foo/baz.txt"] {
16001543
assert!(
16011544
storage
@@ -1604,14 +1547,6 @@ mod backend_tests {
16041547
.downcast_ref::<PathNotFoundError>()
16051548
.is_some()
16061549
);
1607-
1608-
assert!(
1609-
storage
1610-
.get_public_access(path)
1611-
.unwrap_err()
1612-
.downcast_ref::<PathNotFoundError>()
1613-
.is_some()
1614-
);
16151550
}
16161551

16171552
Ok(())
@@ -2065,7 +2000,6 @@ mod backend_tests {
20652000
test_delete_prefix_without_matches,
20662001
test_delete_percent,
20672002
test_exists_without_remote_archive,
2068-
test_set_public,
20692003
}
20702004

20712005
tests_with_metrics {

src/storage/s3.rs

Lines changed: 1 addition & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ use aws_sdk_s3::{
77
Client,
88
config::{Region, retry::RetryConfig},
99
error::{ProvideErrorMetadata, SdkError},
10-
types::{Delete, ObjectIdentifier, Tag, Tagging},
10+
types::{Delete, ObjectIdentifier},
1111
};
1212
use aws_smithy_types_convert::date_time::DateTimeExt;
1313
use axum_extra::headers;
@@ -20,9 +20,6 @@ use futures_util::{
2020
use std::sync::Arc;
2121
use tracing::{error, instrument, warn};
2222

23-
const PUBLIC_ACCESS_TAG: &str = "static-cloudfront-access";
24-
const PUBLIC_ACCESS_VALUE: &str = "allow";
25-
2623
// error codes to check for when trying to determine if an error is
2724
// a "NOT FOUND" error.
2825
// Definition taken from the S3 rust SDK,
@@ -138,49 +135,6 @@ impl S3Backend {
138135
}
139136
}
140137

141-
pub(super) async fn get_public_access(&self, path: &str) -> Result<bool, Error> {
142-
Ok(self
143-
.client
144-
.get_object_tagging()
145-
.bucket(&self.bucket)
146-
.key(path)
147-
.send()
148-
.await
149-
.convert_errors()?
150-
.tag_set()
151-
.iter()
152-
.filter(|tag| tag.key() == PUBLIC_ACCESS_TAG)
153-
.any(|tag| tag.value() == PUBLIC_ACCESS_VALUE))
154-
}
155-
156-
pub(super) async fn set_public_access(&self, path: &str, public: bool) -> Result<(), Error> {
157-
self.client
158-
.put_object_tagging()
159-
.bucket(&self.bucket)
160-
.key(path)
161-
.tagging(if public {
162-
Tagging::builder()
163-
.tag_set(
164-
Tag::builder()
165-
.key(PUBLIC_ACCESS_TAG)
166-
.value(PUBLIC_ACCESS_VALUE)
167-
.build()
168-
.context("could not build tag")?,
169-
)
170-
.build()
171-
.context("could not build tags")?
172-
} else {
173-
Tagging::builder()
174-
.set_tag_set(Some(vec![]))
175-
.build()
176-
.context("could not build tags")?
177-
})
178-
.send()
179-
.await
180-
.convert_errors()
181-
.map(|_| ())
182-
}
183-
184138
#[instrument(skip(self))]
185139
pub(super) async fn get_stream(
186140
&self,

src/test/fakes.rs

Lines changed: 6 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -411,22 +411,14 @@ impl<'a> FakeRelease<'a> {
411411
source_directory.display()
412412
);
413413
if archive_storage {
414-
let (archive, public) = match kind {
415-
FileKind::Rustdoc => {
416-
(rustdoc_archive_path(&package.name, &package.version), true)
417-
}
418-
FileKind::Sources => {
419-
(source_archive_path(&package.name, &package.version), false)
420-
}
414+
let archive = match kind {
415+
FileKind::Rustdoc => rustdoc_archive_path(&package.name, &package.version),
416+
FileKind::Sources => source_archive_path(&package.name, &package.version),
421417
};
422418
debug!("store in archive: {:?}", archive);
423-
let (files_list, new_alg) = crate::db::add_path_into_remote_archive(
424-
storage,
425-
&archive,
426-
source_directory,
427-
public,
428-
)
429-
.await?;
419+
let (files_list, new_alg) =
420+
crate::db::add_path_into_remote_archive(storage, &archive, source_directory)
421+
.await?;
430422
Ok((files_list, new_alg))
431423
} else {
432424
let prefix = match kind {

src/web/extractors/rustdoc.rs

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -574,6 +574,13 @@ impl RustdocParams {
574574
EscapedURI::from_path(path)
575575
}
576576

577+
pub(crate) fn zip_download_url(&self) -> EscapedURI {
578+
EscapedURI::from_path(format!(
579+
"/crate/{}/{}/download",
580+
self.name, self.req_version
581+
))
582+
}
583+
577584
pub(crate) fn json_download_url(
578585
&self,
579586
wanted_compression: Option<CompressionAlgorithm>,
@@ -1806,4 +1813,13 @@ mod tests {
18061813
)
18071814
);
18081815
}
1816+
1817+
#[test]
1818+
fn test_zip_download_url() {
1819+
let params = RustdocParams::new(KRATE).with_req_version(ReqVersion::Exact(V1));
1820+
assert_eq!(
1821+
params.zip_download_url(),
1822+
format!("/crate/{KRATE}/{V1}/download")
1823+
);
1824+
}
18091825
}

0 commit comments

Comments (0)