
Commit e9bdb35

VER: Release 0.16.0

2 parents fb9a261 + aab255d

4 files changed: +122 −5 lines changed

CHANGELOG.md

Lines changed: 14 additions & 1 deletion

@@ -1,6 +1,19 @@
 # Changelog
 
-## 0.15.0 - TBD
+## 0.16.0 - TBD
+
+#### Enhancements
+- Upgraded DBN version to 0.23.1:
+  - Added floating-point getters for price fields
+  - Added new IntelligentCross venues `ASPN`, `ASMT`, and `ASPI`
+- Upgraded `thiserror` version to 2.0
+
+#### Deprecations
+- Deprecated `Packaging` enum and `packaging` field on `SubmitJobParams` and `BatchJob`.
+  These will be removed in a future version. All files from a batch job can be downloaded
+  with the `batch().download()` method on the historical client
+
+## 0.15.0 - 2024-10-22
 
 #### Enhancements
 - Upgraded DBN version to 0.23.0:
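For context on the first enhancement: DBN stores prices as fixed-precision integers in units of 1e-9, and the 0.23.1 getters expose them directly as `f64`. A minimal sketch of the convenience this adds, assuming the getters follow a `price_f64()` naming; the exact method name is an assumption about the dbn API, not something shown in this diff:

use databento::dbn::TradeMsg;

// Sketch: compare manual fixed-point conversion with the assumed
// floating-point getter added in DBN 0.23.1.
fn display_price(trade: &TradeMsg) {
    // Prices are fixed-precision i64 values in units of 1e-9.
    let manual = trade.price as f64 / 1e9;
    // Hypothetical getter name; see the dbn 0.23.1 changelog for the real API.
    let via_getter = trade.price_f64();
    println!("manual: {manual}, getter: {via_getter}");
}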

Cargo.toml

Lines changed: 3 additions & 2 deletions

@@ -1,7 +1,7 @@
 [package]
 name = "databento"
 authors = ["Databento <[email protected]>"]
-version = "0.15.0"
+version = "0.16.0"
 edition = "2021"
 repository = "https://github.com/databento/databento-rs"
 description = "Official Databento client library"
@@ -23,7 +23,7 @@ historical = ["dep:futures", "dep:reqwest", "dep:serde", "dep:tokio-util", "dep:
 live = ["dep:hex", "dep:sha2", "tokio/net"]
 
 [dependencies]
-dbn = { version = "0.23.0", features = ["async", "serde"] }
+dbn = { version = "0.23.1", features = ["async", "serde"] }
 # Async stream trait
 futures = { version = "0.3", optional = true }
 # Used for Live authentication
@@ -43,6 +43,7 @@ typed-builder = "0.20"
 
 [dev-dependencies]
 anyhow = "1.0.91"
+async-compression = { version = "0.4.13", features = ["tokio", "zstd"] }
 clap = { version = "4.5.20", features = ["derive"] }
 tempfile = "3.13.0"
 tokio = { version = "1.41", features = ["full"] }
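The new `async-compression` dev-dependency supports the `split_symbols` example added below, which writes Zstandard-compressed DBN output. The relevant type is `ZstdEncoder`, which wraps any Tokio `AsyncWrite`; a minimal standalone sketch:

use async_compression::tokio::write::ZstdEncoder;
use tokio::{fs::File, io::AsyncWriteExt};

// Wrap a file in a Zstandard-compressing writer. The encoder must be
// shut down when finished to flush the final compressed frame.
async fn open_zstd(path: &str) -> std::io::Result<ZstdEncoder<File>> {
    Ok(ZstdEncoder::new(File::create(path).await?))
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let mut writer = open_zstd("out.zst").await?;
    writer.write_all(b"hello").await?;
    writer.shutdown().await?; // finishes the Zstd frame
    Ok(())
}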

examples/split_symbols.rs

Lines changed: 91 additions & 0 deletions (new file)

//! An example program that splits a DBN file into several DBN files
//! by parent symbol (from the `asset` field in the definitions schema).
use std::collections::HashMap;

use anyhow::Context;
use async_compression::tokio::write::ZstdEncoder;
use databento::{
    dbn::{
        decode::AsyncDbnDecoder, encode::AsyncDbnEncoder, InstrumentDefMsg, Metadata, Schema,
        SymbolIndex,
    },
    historical::timeseries::GetRangeParams,
    HistoricalClient,
};
use tokio::fs::File;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    if std::env::args().len() != 3 {
        anyhow::bail!(
            "Invalid number of arguments, expected: split_symbols FILE_PATH OUTPUT_PATTERN"
        );
    }
    let file_path = std::env::args().nth(1).unwrap();
    let output_pattern = std::env::args().nth(2).unwrap();
    if !output_pattern.contains("{parent}") {
        anyhow::bail!("OUTPUT_PATTERN should contain {{parent}}");
    }
    let mut decoder = AsyncDbnDecoder::from_zstd_file(file_path).await?;

    let metadata = decoder.metadata().clone();
    let symbol_map = metadata.symbol_map()?;
    let symbols_to_parent = fetch_symbols_to_parent(&metadata).await?;
    // Route each record to the encoder for its parent symbol, creating
    // Zstd-compressed output files lazily as new parents are encountered.
    let mut encoders = HashMap::<String, AsyncDbnEncoder<ZstdEncoder<File>>>::new();
    while let Some(rec) = decoder.decode_record_ref().await? {
        let Some(symbol) = symbol_map.get_for_rec(&rec) else {
            eprintln!("Missing mapping for {rec:?}");
            continue;
        };
        let Some(parent) = symbols_to_parent.get(symbol) else {
            eprintln!("Couldn't find parent mapping for {symbol} with {rec:?}");
            continue;
        };
        if let Some(encoder) = encoders.get_mut(parent) {
            encoder.encode_record_ref(rec).await?;
        } else {
            let mut encoder = AsyncDbnEncoder::with_zstd(
                File::create_new(output_pattern.replace("{parent}", parent))
                    .await
                    .with_context(|| format!("creating file for {parent}"))?,
                &metadata,
            )
            .await?;
            encoder.encode_record_ref(rec).await?;
            encoders.insert(parent.clone(), encoder);
        }
    }
    // Shut down each encoder to flush buffers and finish the Zstd frames.
    for (parent, encoder) in encoders {
        if let Err(e) = encoder.shutdown().await {
            eprintln!("Failed to shutdown encoder for {parent}: {e:?}");
        }
    }

    Ok(())
}

/// Fetches the definitions schema for the file's symbols and date range to
/// build a mapping from raw symbol to parent symbol (the `asset` field).
async fn fetch_symbols_to_parent(metadata: &Metadata) -> anyhow::Result<HashMap<String, String>> {
    let mut client = HistoricalClient::builder().key_from_env()?.build()?;
    let end = metadata.end().ok_or_else(|| {
        anyhow::format_err!("Missing end in metadata. This script is intended for historical data")
    })?;
    let mut res = HashMap::new();
    // 2000 is the maximum number of symbols per request
    for chunk in metadata.symbols.chunks(2000) {
        let mut decoder = client
            .timeseries()
            .get_range(
                &GetRangeParams::builder()
                    .dataset(metadata.dataset.clone())
                    .schema(Schema::Definition)
                    .date_time_range((metadata.start(), end))
                    .symbols(Vec::from(chunk))
                    .build(),
            )
            .await?;
        while let Some(def) = decoder.decode_record::<InstrumentDefMsg>().await? {
            res.insert(def.raw_symbol()?.to_owned(), def.asset()?.to_owned());
        }
    }
    Ok(res)
}
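To try the example (assuming an API key in the `DATABENTO_API_KEY` environment variable, which `key_from_env()` reads): cargo run --example split_symbols -- input.dbn.zst "out/{parent}.dbn.zst". The `{parent}` placeholder is replaced with each record's parent symbol; note that `File::create_new` fails if an output file already exists, so point the pattern at a fresh directory.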

src/historical/batch.rs

Lines changed: 14 additions & 2 deletions

@@ -1,5 +1,7 @@
 //! The historical batch download API.
 
+#![allow(deprecated)] // Packaging
+
 use core::fmt;
 use std::{
     collections::HashMap,
@@ -209,6 +211,10 @@ pub enum SplitDuration {
 
 /// How the batch job will be packaged.
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[deprecated(
+    since = "0.16.0",
+    note = "Use the `download()` method to download the whole job"
+)]
 pub enum Packaging {
     /// ZIP compressed.
     Zip,
@@ -291,6 +297,10 @@ pub struct SubmitJobParams {
     pub split_size: Option<NonZeroU64>,
     /// The optional archive type to package all batched data files in. Defaults to `None`.
     #[builder(default, setter(strip_option))]
+    #[deprecated(
+        since = "0.16.0",
+        note = "Use the `download()` method to download the whole job"
+    )]
     pub packaging: Option<Packaging>,
     /// The delivery mechanism for the batched data files once processed. Defaults to
     /// [`Download`](Delivery::Download).
@@ -357,6 +367,10 @@ pub struct BatchJob {
     /// The maximum size for an individual file before splitting into multiple files.
     pub split_size: Option<NonZeroU64>,
     /// The packaging method of the batch data.
+    #[deprecated(
+        since = "0.16.0",
+        note = "Use the `download()` method to download the whole job"
+    )]
     pub packaging: Option<Packaging>,
     /// The delivery mechanism of the batch data.
     pub delivery: Delivery,
@@ -476,7 +490,6 @@ impl Packaging {
     pub const fn as_str(&self) -> &'static str {
         match self {
             Packaging::Zip => "zip",
-            #[allow(deprecated)]
             Packaging::Tar => "tar",
         }
     }
@@ -494,7 +507,6 @@ impl FromStr for Packaging {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
             "zip" => Ok(Packaging::Zip),
-            #[allow(deprecated)]
             "tar" => Ok(Packaging::Tar),
             _ => Err(crate::Error::bad_arg(
                 "s",
