0.3.6: WIP

Author: wisdgod
Date: 2025-07-27 08:46:05 +08:00
parent c3bfb3b66e
commit c1a7dd1acb
438 changed files with 169172 additions and 15998 deletions

View File

@@ -1,5 +1,8 @@
# The current configuration uses default values; modify as needed
# Server listen IP (use your actual address)
HOST=
# Server listen port
PORT=3000
@@ -9,7 +12,7 @@ ROUTE_PREFIX=
# Highest-privilege auth token, required
AUTH_TOKEN=
# Shared auth token, Chat endpoint only (round-robin, synced with AUTH_TOKEN), no other permissions
# Auth token used for sharing, Chat endpoint only (round-robin, synced with AUTH_TOKEN), no other permissions
SHARED_TOKEN=
# Enable streaming response checks; if disabled, errors cannot be reported; the cost is parsing the first chunk twice (deprecated)
@@ -31,7 +34,7 @@ ENABLE_SLOW_POOL=false
# Allow requests for models starting with claude to bypass the built-in model restrictions, true/false (deprecated)
# PASS_ANY_CLAUDE=false
# (Experimental) Enable long-context mode, true/false
# Enable long-context mode, true/false
ENABLE_LONG_CONTEXT=false
# Image processing capability configuration
@@ -86,18 +89,18 @@ PUB_REVERSE_PROXY_HOST=
# - Configure this item at /config
# PROXIES=system
# Request body size limit (in MB)
# Default is 2MB (2,097,152 bytes)
REQUEST_BODY_LIMIT_MB=2
# Request body size limit, in bytes
# Default is 2MB (2,000,000 bytes)
REQUEST_BODY_LIMIT=2000000
# Delimiter between token and checksum in OpenAI requests
TOKEN_DELIMITER=,
# Delimiter between token and checksum in OpenAI requests (deprecated)
# TOKEN_DELIMITER=,
# Also accept the default , as a delimiter
USE_COMMA_DELIMITER=true
# Also accept the default , as a delimiter (deprecated)
# USE_COMMA_DELIMITER=true
# Debug
DEBUG=false
DEBUG=true
# Debug log file
DEBUG_LOG_FILE=debug.log
@@ -129,11 +132,52 @@ GENERAL_TIMEZONE=Asia/Shanghai
# Consecutive empty-stream threshold; the connection is closed once reached, default 10 (deprecated)
# MAX_EMPTY_STREAM_COUNT=10
# Use the embedded official Claude.ai prompts as the default prompts; for models starting with claude-, takes precedence over DEFAULT_INSTRUCTIONS
USE_OFFICIAL_CLAUDE_PROMPTS=false
# Use the embedded official Claude.ai prompts as the default prompts; for models starting with claude-, takes precedence over DEFAULT_INSTRUCTIONS (deprecated)
# USE_OFFICIAL_CLAUDE_PROMPTS=false
# Real usage quota (due to issues in the Cursor service itself this waits about 5 seconds; streaming may be buggy for architectural reasons), otherwise all zeros
REAL_USAGE=false
# Disable HTTP/2
# DISABLE_HTTP2=false
# Safe hash; checksum generation is slower
# Cursor client version
CURSOR_CLIENT_VERSION=1.0.0
# Thinking tag
THINKING_TAG=think
# Real usage quota, otherwise all zeros
REAL_USAGE=true
# Safe hash; hash generation is slower, related to the 30000-second client key refresh and checksum generation
SAFE_HASH=true
# Model data fetch mode
# - Options:
# - truncate - Overwrite mode (default): use the newly fetched model list as-is, replacing all existing models
# - append:truncate - Smart merge mode: keep existing models that are not in the new list, and add or update the new models
# - append - Append-only mode: only add models that do not exist yet; existing models stay unchanged
FETCH_RAW_MODELS=truncate
# Default code-completion region (corresponds to the gcpp_host key in the token's tags)
# - Options
# - us-asia.gcpp.cursor.sh | Asia - closer to Asian nodes
# - us-eu.gcpp.cursor.sh | EU - closer to European nodes
# - us-only.gcpp.cursor.sh | US - closer to US nodes
GENERAL_GCPP_HOST=Asia
# Short-lived token validity in seconds (deprecated)
# Web
# Format: a single number or a range (e.g. "3600" or "3600-5400")
# Example: 5400 means the token is valid for 1.5 hours
# TOKEN_SHORT_VALIDITY=5400
# Long-lived token validity in seconds (deprecated)
# Session
# Format: a single number or a range (e.g. "86400" or "86400-5184000")
# Example: 5184000 means the token is valid for 60 days
# TOKEN_LONG_VALIDITY=5184000
# Allowed token providers
ALLOWED_PROVIDERS=auth0,google-oauth2,github
# Bypass model validation and allow all models (with some performance cost)
BYPASS_MODEL_VALIDATION=false
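The three FETCH_RAW_MODELS modes above boil down to different merge rules between the freshly fetched model list and the one already stored. A minimal Rust sketch of those rules, assuming models are keyed by an `id` string; the `Model` type and `merge_models` helper are illustrative only, not the project's actual code:

```rust
use std::collections::HashMap;

#[derive(Clone, Debug)]
struct Model {
    id: String,
    display_name: String,
}

/// Merge freshly fetched models into the existing map according to FETCH_RAW_MODELS.
fn merge_models(mode: &str, existing: &mut HashMap<String, Model>, fetched: Vec<Model>) {
    match mode {
        // truncate: drop everything and use the fetched list as-is.
        "truncate" => {
            existing.clear();
            existing.extend(fetched.into_iter().map(|m| (m.id.clone(), m)));
        }
        // append:truncate: keep models missing from the new list, add or update the rest.
        "append:truncate" => {
            existing.extend(fetched.into_iter().map(|m| (m.id.clone(), m)));
        }
        // append: only insert ids that do not exist yet; existing entries stay untouched.
        "append" => {
            for m in fetched {
                existing.entry(m.id.clone()).or_insert(m);
            }
        }
        _ => eprintln!("unknown FETCH_RAW_MODELS mode: {mode}"),
    }
}

fn main() {
    let mut existing = HashMap::new();
    existing.insert(
        "gpt-4".to_string(),
        Model { id: "gpt-4".to_string(), display_name: "GPT-4 (old)".to_string() },
    );
    let fetched = vec![Model { id: "gpt-4o".to_string(), display_name: "GPT-4o".to_string() }];
    merge_models("append:truncate", &mut existing, fetched);
    println!("{} models after merge", existing.len()); // 2: gpt-4 kept, gpt-4o added
}
```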

View File

@@ -13,9 +13,9 @@ on:
required: true
type: boolean
default: true
push:
tags:
- 'v*'
# push:
# tags:
# - 'v*'
env:
IMAGE_NAME: ${{ github.repository_owner }}/cursor-api

14
.gitignore vendored
View File

@@ -15,13 +15,21 @@ node_modules
/cursor-api
/cursor-api.exe
/release
/data
/data*
/*.py
/logs
/dev*
/build*
/build*.sh
/*.bin
/result.txt
tools/tokenizer/
/diff
/Cargo.lock
Cargo.lock
/*api.sh
/cursor-api.tar.gz
/scripts/package-lock.json
/*.sh
/*.tar.gz
/src/core/model/a.rs
/src/core/aiserver/v1/lite.proto
.cargo/config.toml

View File

@@ -1,16 +0,0 @@
# Compliance usage guide
attribution_rules:
required_disclaimer:
text: "Built on third-party technology; not affiliated with the original developers"
placement:
- documentation
- marketing_materials
- about_sections
prohibited_actions:
- using_author_name_in_press_releases
- claiming_official_support
- using_project_logo_as_endorsement
enforcement:
grace_period: 72h
compliance_check: https://api.wisdgod.com/license/validate

View File

@@ -1,63 +1,103 @@
cargo-features = ["profile-rustflags", "trim-paths"]
[package]
name = "cursor-api"
version = "0.1.3-rc.5.2.5"
version = "0.3.6"
edition = "2024"
authors = ["wisdgod <nav@wisdgod.com>"]
description = "OpenAI format compatibility layer for the Cursor API"
repository = "https://github.com/wisdgod/cursor-api"
[[bin]]
name = "cursor-api"
path = "src/main.rs"
# [[bin]]
# name = "rkyv-adapter"
# path = "tools/rkyv_adapter/src/main.rs"
[build-dependencies]
prost-build = "^0.13"
sha2 = { version = "^0.10.8", default-features = false }
serde_json = "^1.0"
chrono = { version = "0.4", default-features = false, features = ["alloc"]}
prost-build = { version = "0.14", optional = true }
sha2 = { version = "0.10", default-features = false }
serde_json = "1"
[dependencies]
axum = { version = "^0.8", default-features = false, features = ["http1", "http2", "json", "tokio", "query"] }
base64 = { version = "^0.22", default-features = false, features = ["std"] }
# brotli = { version = "^7.0", default-features = false, features = ["std"] }
bytes = "^1.10"
chrono = { version = "^0.4", default-features = false, features = ["std", "clock", "now", "serde", "rkyv-64"] }
chrono-tz = { version = "^0.10", features = ["serde"] }
dotenvy = "^0.15"
ahash = { version = "0.8", default-features = false, features = ["std", "compile-time-rng", "serde"] }
arc-swap = "1"
axum = { version = "0.8", default-features = false, features = ["http1", "http2", "json", "tokio", "query", "macros"] }
# base62 = "2.2.1"
base64 = { version = "0.22", default-features = false, features = ["std"] }
# bs58 = { version = "0.5.1", default-features = false, features = ["std"] }
# brotli = { version = "7.0", default-features = false, features = ["std"] }
bytes = "1.10"
chrono = { version = "0.4", default-features = false, features = ["alloc", "serde", "rkyv-64"] }
chrono-tz = { version = "0.10", features = ["serde"] }
dotenvy = "0.15"
flate2 = { version = "1", default-features = false, features = ["rust_backend"] }
futures = { version = "^0.3", default-features = false, features = ["std"] }
gif = { version = "^0.13", default-features = false, features = ["std"] }
hex = { version = "^0.4", default-features = false, features = ["std"] }
futures = { version = "0.3", default-features = false, features = ["std"] }
gif = { version = "0.13", default-features = false, features = ["std"] }
hashbrown = { version = "0.15", default-features = false }
hex = { version = "0.4", default-features = false, features = ["std"] }
http = "1"
image = { version = "^0.25", default-features = false, features = ["jpeg", "png", "gif", "webp"] }
lasso = { version = "^0.7", features = ["inline-more", "multi-threaded"] }
memmap2 = "^0.9"
# openssl = { version = "^0.10", features = ["vendored"] }
parking_lot = "^0.12"
paste = "^1.0"
prost = "^0.13"
prost-types = "^0.13"
rand = { version = "^0.9", default-features = false, features = ["thread_rng"] }
reqwest = { version = "^0.12", default-features = false, features = ["gzip", "brotli", "json", "stream", "socks", "__tls", "charset", "rustls-tls-webpki-roots", "macos-system-configuration"] }
rkyv = { version = "^0.7", default-features = false, features = ["alloc", "std", "bytecheck", "size_64", "validation", "std"] }
serde = { version = "^1.0", default-features = false, features = ["std", "derive", "rc"] }
serde_json = { package = "sonic-rs", version = "0.5" }
# serde_json = "^1.0"
sha2 = { version = "^0.10", default-features = false }
sysinfo = { version = "^0.34", default-features = false, features = ["system"] }
tokio = { version = "^1.43", features = ["rt-multi-thread", "macros", "net", "sync", "time", "fs", "signal"] }
# tokio-stream = { version = "^0.1", features = ["time"] }
tower-http = { version = "^0.6", features = ["cors", "limit"] }
url = { version = "^2.5", default-features = false, features = ["serde"] }
uuid = { version = "^1.14", features = ["v4"] }
http-body-util = "0.1"
image = { version = "0.25", default-features = false, features = ["jpeg", "png", "gif", "webp"] }
# lasso = { version = "0.7", features = ["multi-threaded", "ahasher"] }
memmap2 = "0.9"
# openssl = { version = "0.10", features = ["vendored"] }
parking_lot = "0.12"
paste = "1.0"
phf = { version = "0.12", features = ["macros"] }
# pin-project-lite = "0.2"
# pin-project = "1"
prost = "0.14"
prost-types = "0.14"
rand = { version = "0.9", default-features = false, features = ["thread_rng"] }
reqwest = { version = "0.12", default-features = false, features = ["gzip", "brotli", "json", "stream", "socks", "charset", "http2", "macos-system-configuration"] }
rkyv = { version = "0.8", default-features = false, features = ["std", "pointer_width_64", "uuid-1"] }
# rustls = { version = "0.23.26", default-features = false, features = ["std", "tls12"] }
serde = { version = "1", default-features = false, features = ["std", "derive", "rc"] }
# serde_json = { package = "sonic-rs", version = "0" }
serde_json = "1"
sha2 = { version = "0", default-features = false }
sysinfo = { version = "0.36", default-features = false, features = ["system"] }
tokio = { version = "1", features = ["rt-multi-thread", "macros", "net", "sync", "time", "fs", "signal"] }
tokio-util = { version = "0.7", features = ["io"] }
# tokio-tungstenite = { version = "0.26.2", features = ["rustls-tls-webpki-roots"] }
# tokio-stream = { version = "0.1", features = ["time"] }
tower-http = { version = "0.6", features = ["cors", "limit"] }
tracing = { version = "*", default-features = false, features = ["max_level_off", "release_max_level_off"] }
ulid = { version = "1.2", default-features = false, features = ["std", "rkyv"] }
# tracing-subscriber = "0.3"
url = { version = "2.5", default-features = false, features = ["serde"] }
uuid = { version = "1.14", default-features = false, features = ["v4", "fast-rng", "serde"] }
[profile.release]
lto = true
codegen-units = 1
panic = 'abort'
strip = true
# debug = true
# split-debuginfo = 'packed'
# strip = "none"
# panic = 'unwind'
opt-level = 3
trim-paths = "all"
rustflags = ["-Cdebuginfo=0", "-Zthreads=8"]
[features]
default = []
default = ["webpki-roots"]
webpki-roots = ["reqwest/rustls-tls-webpki-roots"]
native-roots = ["reqwest/rustls-tls-native-roots"]
use-minified = []
__preview = []
__protoc = ["prost-build"]
__compat = []
[patch.crates-io]
h2 = { path = "patch/h2-0.4.10" }
reqwest = { path = "patch/reqwest-0.12.18" }
rustls = { path = "patch/rustls-0.23.27" }
chrono = { path = "patch/chrono-0.4.41" }
ulid = { path = "patch/ulid-1.2.1" }
dotenvy = { path = "patch/dotenvy-0.15.7" }
# bs58 = { path = "patch/bs58-0.5.1" }
# base62 = { path = "patch/base62-2.2.1" }

View File

@@ -1,5 +0,0 @@
[target.x86_64-unknown-linux-gnu]
dockerfile = "Dockerfile.cross"
[target.aarch64-unknown-linux-gnu]
dockerfile = "Dockerfile.cross.arm64"

View File

@@ -1,179 +0,0 @@
# Cursor API
## Project Overview
### Version Notes
- The current version has reached a stable stage
- The following issues are unrelated to this program; please do not report them:
- Missing or dropped characters in responses
- Latency before the first character
- Garbled characters in responses
- Performance advantages:
- Matches the native client's response speed
- Performs better in some scenarios
- Open-source license requirements:
- Forked projects must not advertise or promote themselves under the original author's name
- Publishing any form of official statement is prohibited
![Cursor API architecture diagram](https://via.placeholder.com/800x400.png?text=Cursor+API+Architecture)
## Quick Start
### Obtaining a Key
1. Visit the [Cursor website](https://www.cursor.com) and register or log in
2. Open the browser developer tools (F12)
3. Locate `WorkosCursorSessionToken` under Application → Cookies
4. Copy the third field value (note: `%3A%3A` is the URL-encoded form of `::`)
## Configuration Guide
### Environment Variables
| Variable | Type | Default | Description |
|--------|------|--------|-----|
| PORT | int | 3000 | Service port |
| AUTH_TOKEN | string | none | Auth token (required) |
| ROUTE_PREFIX | string | none | Route prefix |
| TOKEN_LIST_FILE | string | .tokens | Token storage file |
See [env-example](/env-example) for the full configuration
### Token File Format
`.tokens` file format:
```plaintext
# Comment lines are automatically removed on the next read
token1,checksum1
token2,checksum2
```
File management principles:
- The system maintains the file contents automatically
- Manual editing is only needed to:
- Remove a specific token
- Bind an existing checksum to a specific token
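The format above is simple enough to parse line by line. A minimal Rust sketch, assuming the default comma delimiter and a file in the current directory; the `read_tokens` helper is hypothetical, not the project's actual loader:

```rust
use std::fs;

/// Hypothetical helper: parse a .tokens file into (token, checksum) pairs.
fn read_tokens(path: &str) -> std::io::Result<Vec<(String, String)>> {
    let content = fs::read_to_string(path)?;
    let mut pairs = Vec::new();
    for line in content.lines() {
        let line = line.trim();
        // Comment and blank lines are skipped; per the note above, comments
        // are dropped the next time the file is rewritten.
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        if let Some((token, checksum)) = line.split_once(',') {
            pairs.push((token.trim().to_string(), checksum.trim().to_string()));
        }
    }
    Ok(pairs)
}

fn main() -> std::io::Result<()> {
    for (token, checksum) in read_tokens(".tokens")? {
        println!("{token} -> {checksum}");
    }
    Ok(())
}
```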
## Supported Models
```json
[
"claude-3.5-sonnet",
"gpt-4",
"gpt-4o",
"cursor-fast",
"gpt-4o-mini",
"deepseek-v3"
]
```
*Note: the model list is a fixed configuration; custom extensions are not supported yet*
## API Documentation
### Basic Chat Endpoint
**Endpoint**
`POST /v1/chat/completions`
**Authentication**
`Bearer Token` with a three-tier authentication mechanism:
1. The `AUTH_TOKEN` environment variable
2. Round-robin over the `.token` file
3. Direct token,checksum authentication (v0.1.3-rc.3+), as sketched below
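For the third tier, a client joins the token and checksum with the configured delimiter (comma by default) and sends the result as the bearer credential. A minimal sketch using `reqwest` (with its `json` feature), `tokio`, and `serde_json` against a local deployment on port 3000; the token, checksum, and URL are placeholder values, not real credentials:

```rust
use reqwest::Client;
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Placeholder credentials: a Cursor session token and its checksum,
    // joined by the configured delimiter (comma by default).
    let token = "your_session_token";
    let checksum = "your_checksum";
    let bearer = format!("{token},{checksum}");

    let resp = Client::new()
        .post("http://127.0.0.1:3000/v1/chat/completions")
        .bearer_auth(bearer)
        .json(&json!({
            "model": "gpt-4",
            "messages": [{ "role": "user", "content": "Hello" }],
            "stream": false
        }))
        .send()
        .await?;

    println!("{}", resp.text().await?);
    Ok(())
}
```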
**Request example**
```json
{
"model": "gpt-4",
"messages": [
{
"role": "user",
"content": "解释量子计算的基本原理"
}
],
"stream": false
}
```
**Response example (non-streaming)**
```json
{
"id": "chatcmpl-9Xy...",
"object": "chat.completion",
"created": 1628063500,
"model": "gpt-4",
"choices": [{
"index": 0,
"message": {
"role": "assistant",
"content": "量子计算基于量子比特..."
},
"finish_reason": "stop"
}]
}
```
### Token Management Endpoints
| Endpoint | Method | Function |
|------|------|-----|
| `/tokens` | GET | Token management UI |
| `/tokens/set` | POST | Bulk-update the token list |
| `/tokens/add` | POST | Incrementally add tokens |
| `/tokens/del` | POST | Delete specified tokens |
```mermaid
sequenceDiagram
participant Client
participant API
Client->>API: POST /tokens/add
API->>API: Validate token
API->>File: Write .tokens
API-->>Client: Return update result
```
## Advanced Features
### Dynamic Key Generation
**Endpoint**
`POST /build-key`
**Comparison**
| Feature | Traditional mode | Dynamic key |
|------|---------|---------|
| Key length | Longer | Optimized, shorter |
| Config extensions | None | Customizable |
| Security level | Basic | Enhanced encoding |
| Validation efficiency | Costly pre-validation | Instant validation |
## System Monitoring
### Health Check
**Endpoint**
`GET /health`
**Response example**
```json
{
"status": "success",
"version": "1.2.0",
"uptime": 86400,
"models": ["gpt-4", "claude-3.5"],
"endpoints": ["/v1/chat", "/tokens"]
}
```
## Ecosystem Tools
### Development Utilities
- [Token retrieval tool](https://github.com/wisdgod/cursor-api/tree/main/tools/get-token)
Supports Windows/Linux/macOS
- [Telemetry reset tool](https://github.com/wisdgod/cursor-api/tree/main/tools/reset-telemetry)
Clears user usage data records
## Acknowledgements
This project has been inspired by the following open-source projects:
- [zhx47/cursor-api](https://github.com/zhx47/cursor-api) - base architecture reference
- [cursorToApi](https://github.com/luolazyandlazy/cursorToApi) - authentication mechanism improvements
---
> **Project maintenance notes**
> We welcome community contributions, but please note:
> 1. Feature requests must include a description of the use case
> 2. Bug reports must include reproduction steps and environment details
> 3. Significant changes must pass the CI/CD test pipeline

View File

@@ -1,35 +1,24 @@
ARG TARGETARCH
FROM --platform=linux/${TARGETARCH} rustlang/rust:nightly-bookworm-slim as builder
FROM --platform=linux/${TARGETARCH} rustlang/rust:nightly-bookworm-slim AS builder
ARG TARGETARCH
WORKDIR /app
RUN apt-get update && \
apt-get install -y --no-install-recommends \
build-essential protobuf-compiler pkg-config libssl-dev nodejs npm openssl \
&& rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y --no-install-recommends build-essential protobuf-compiler nodejs npm musl-tools && rm -rf /var/lib/apt/lists/* && case "$TARGETARCH" in amd64) rustup target add x86_64-unknown-linux-musl ;; arm64) rustup target add aarch64-unknown-linux-musl ;; *) echo "Unsupported architecture for rustup: $TARGETARCH" && exit 1 ;; esac
COPY . .
RUN case "$TARGETARCH" in amd64) TARGET_CPU="x86-64-v2" ;; arm64) TARGET_CPU="neoverse-n1" ;; *) echo "Unsupported architecture: $TARGETARCH" && exit 1 ;; esac && RUSTFLAGS="-C link-arg=-s -C target-cpu=$TARGET_CPU" cargo +nightly build --release && cp target/release/cursor-api /app/cursor-api
RUN case "$TARGETARCH" in amd64) TARGET_TRIPLE="x86_64-unknown-linux-musl"; TARGET_CPU="x86-64-v3" ;; arm64) TARGET_TRIPLE="aarch64-unknown-linux-musl"; TARGET_CPU="neoverse-n1" ;; *) echo "Unsupported architecture: $TARGETARCH" && exit 1 ;; esac && RUSTFLAGS="-C link-arg=-s -C target-feature=+crt-static -C target-cpu=$TARGET_CPU" cargo build --bin cursor-api --release --target=$TARGET_TRIPLE && cp target/$TARGET_TRIPLE/release/cursor-api /app/cursor-api
# Runtime stage
ARG TARGETARCH
FROM --platform=linux/${TARGETARCH} debian:bookworm-slim
FROM scratch
WORKDIR /app
ENV TZ=Asia/Shanghai
RUN apt-get update && \
apt-get install -y --no-install-recommends \
ca-certificates tzdata openssl \
&& rm -rf /var/lib/apt/lists/* && \
groupadd -r cursorapi && useradd -r -g cursorapi cursorapi
COPY --from=builder /app/cursor-api .
RUN chown -R cursorapi:cursorapi /app
ENV PORT=3000
EXPOSE ${PORT}
USER cursorapi
CMD ["./cursor-api"]
USER 1001
ENTRYPOINT ["/app/cursor-api"]

View File

@@ -1,31 +0,0 @@
# Dockerfile.cross
FROM --platform=linux/amd64 rustlang/rust:nightly-bookworm-slim
WORKDIR /app
# Install required packages
RUN apt-get update && \
apt-get install -y --no-install-recommends \
build-essential \
pkg-config \
libssl-dev \
protobuf-compiler \
openssl \
&& rm -rf /var/lib/apt/lists/*
# Set environment variables (if needed)
# ENV RUSTFLAGS="-C link-arg=-s"
# Set the PROTOC environment variable (because build.rs needs it)
ENV PROTOC=/usr/bin/protoc
# Install a specific protoc version (only if you need a particular version, e.g. 29.3; otherwise this part can be removed)
# ENV PROTOC_VERSION=29.3
# ENV PROTOC_ZIP=protoc-${PROTOC_VERSION}-linux-x86_64.zip
# RUN wget https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/${PROTOC_ZIP} -O /tmp/${PROTOC_ZIP} && \
# unzip /tmp/${PROTOC_ZIP} -d /usr && \
# rm /tmp/${PROTOC_ZIP}
# Verify the installation
RUN protoc --version

View File

@@ -1,31 +0,0 @@
# Dockerfile.cross
FROM --platform=linux/arm64/v8 rust:1-slim-bookworm
WORKDIR /app
# Install required packages
RUN apt-get update && \
apt-get install -y --no-install-recommends \
build-essential \
pkg-config \
libssl-dev \
protobuf-compiler \
openssl \
&& rm -rf /var/lib/apt/lists/*
# Set environment variables (if needed)
# ENV RUSTFLAGS="-C link-arg=-s"
# Set the PROTOC environment variable (because build.rs needs it)
ENV PROTOC=/usr/bin/protoc
# Install a specific protoc version (only if you need a particular version, e.g. 29.3; otherwise this part can be removed)
# ENV PROTOC_VERSION=29.3
# ENV PROTOC_ZIP=protoc-${PROTOC_VERSION}-linux-x86_64.zip
# RUN wget https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/${PROTOC_ZIP} -O /tmp/${PROTOC_ZIP} && \
# unzip /tmp/${PROTOC_ZIP} -d /usr && \
# rm /tmp/${PROTOC_ZIP}
# Verify the installation
RUN protoc --version

1667
README.md

File diff suppressed because it is too large

View File

@@ -1 +1 @@
25
88

222
build.rs
View File

@@ -51,9 +51,7 @@ fn get_files_hash() -> Result<HashMap<PathBuf, String>> {
let readme_path = Path::new("README.md");
if readme_path.exists() {
let content = fs::read(readme_path)?;
let mut hasher = Sha256::new();
hasher.update(&content);
let hash = format!("{:x}", hasher.finalize());
let hash = format!("{:x}", Sha256::new().chain_update(&content).finalize());
file_hashes.insert(readme_path.to_path_buf(), hash);
}
@@ -63,15 +61,13 @@ fn get_files_hash() -> Result<HashMap<PathBuf, String>> {
let path = entry.path();
// Check whether this is a supported file type and not an already-minified file
if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
if SUPPORTED_EXTENSIONS.contains(&ext) && !path.to_string_lossy().contains(".min.")
{
let content = fs::read(&path)?;
let mut hasher = Sha256::new();
hasher.update(&content);
let hash = format!("{:x}", hasher.finalize());
file_hashes.insert(path, hash);
}
if let Some(ext) = path.extension().and_then(|e| e.to_str())
&& SUPPORTED_EXTENSIONS.contains(&ext)
&& !path.to_string_lossy().contains(".min.")
{
let content = fs::read(&path)?;
let hash = format!("{:x}", Sha256::new().chain_update(&content).finalize());
file_hashes.insert(path, hash);
}
}
}
@@ -172,92 +168,133 @@ fn minify_assets() -> Result<()> {
Ok(())
}
/**
* Version bump function
* Reads the number in the VERSION file, adds 1, then writes it back
* If the VERSION file is missing or empty, counting starts from 1
* Runs only in release mode; completely skipped in debug/dev mode
*/
#[cfg(not(debug_assertions))]
#[cfg(feature = "__preview")]
fn update_version() -> Result<()> {
let version_path = "VERSION";
// The VERSION file is already watched in main; no need to repeat it here
include!("build_info.rs");
// Read the current version number
let mut version = String::new();
let mut file = match File::open(version_path) {
Ok(file) => file,
Err(_) => {
// If the file does not exist or cannot be opened, start from 1
println!("cargo:warning=VERSION file not found, creating with initial value 1");
let mut new_file = File::create(version_path)?;
new_file.write_all(b"1")?;
return Ok(());
#[cfg(feature = "__protoc")]
macro_rules! proto_attributes {
(config: $config:expr, paths: $paths:expr, attributes: [$($attr:expr),* $(,)?]) => {
for path in $paths {
$(
$config.type_attribute(path, $attr);
)*
}
};
file.read_to_string(&mut version)?;
// Make sure the version number is a valid integer
let version_num = match version.trim().parse::<u64>() {
Ok(num) => num,
Err(_) => {
println!("cargo:warning=Invalid version number in VERSION file. Setting to 1.");
let mut file = File::create(version_path)?;
file.write_all(b"1")?;
return Ok(());
}
};
// Increment the version number by 1
let new_version = version_num + 1;
println!(
"cargo:warning=Release build - bumping version from {} to {}",
version_num, new_version
);
// Write it back to the file
let mut file = File::create(version_path)?;
file.write_all(new_version.to_string().as_bytes())?;
Ok(())
}
fn main() -> Result<()> {
// Bump the version number - only runs in release builds
#[cfg(not(debug_assertions))]
#[cfg(feature = "__preview")]
#[cfg(all(not(debug_assertions), feature = "__preview"))]
update_version()?;
// Proto file handling
// println!("cargo:rerun-if-changed=src/core/aiserver/v1/lite.proto");
// println!("cargo:rerun-if-changed=src/core/config/key.proto");
// Read the PROTOC environment variable
// let protoc_path = match std::env::var_os("PROTOC") {
// Some(path) => PathBuf::from(path),
// None => {
// println!("cargo:warning=PROTOC environment variable not set, using default protoc.");
// // If PROTOC is not set, return an empty PathBuf; prost-build will try to use the default protoc
// PathBuf::new()
// }
// };
// let mut config = prost_build::Config::new();
// // If protoc_path is not empty, configure it to use the specified protoc
// if !protoc_path.as_os_str().is_empty() {
// config.protoc_executable(protoc_path);
// }
// config.type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]");
// config.enum_attribute(".aiserver.v1", "#[allow(clippy::enum_variant_names)]");
// config
// .compile_protos(
// &["src/core/aiserver/v1/lite.proto"],
// &["src/core/aiserver/v1/"],
// )
// .unwrap();
// config
// .compile_protos(&["src/core/config/key.proto"], &["src/core/config/"])
// .unwrap();
#[cfg(feature = "__protoc")]
{
// Proto file handling
println!("cargo:rerun-if-changed=src/core/aiserver/v1/lite.proto");
println!("cargo:rerun-if-changed=src/core/config/key.proto");
// Read the PROTOC environment variable and create the config
let mut config = prost_build::Config::new();
// Check whether the environment variable is set
match std::env::var_os("PROTOC") {
Some(path) => {
// If the environment variable is set, configure it directly
config.protoc_executable(PathBuf::from(path));
}
None => {
// If it is not set, emit a warning and use the default protoc
println!(
"cargo:warning=PROTOC environment variable not set, using default protoc."
);
// No extra action needed here; prost-build will automatically use the default protoc
}
}
// config.type_attribute(".", "#[derive(serde::Serialize, serde::Deserialize)]");
// config.enum_attribute(".aiserver.v1", "#[allow(clippy::enum_variant_names)]");
for p in [
".aiserver.v1.CppSessionEvent.event.git_context_event",
".aiserver.v1.CppTimelineEvent.v.event",
".aiserver.v1.StreamAiLintBugResponse.response.bug",
".aiserver.v1.StreamChatToolformerResponse.response_type.tool_action",
".aiserver.v1.TaskStreamLogResponse.response.streamed_log_item",
".aiserver.v1.StreamUnifiedChatRequestWithTools.request.stream_unified_chat_request",
".aiserver.v1.StreamUnifiedChatRequestWithTools.request.client_side_tool_v2_result",
".aiserver.v1.StreamUnifiedChatResponseWithTools.response.client_side_tool_v2_call",
".aiserver.v1.StreamUnifiedChatResponseWithTools.response.stream_unified_chat_response",
] {
config.boxed(p);
}
proto_attributes! {
config: config,
paths: [
".aiserver.v1.CursorPosition",
".aiserver.v1.SimplestRange",
".aiserver.v1.SimpleRange",
".aiserver.v1.LineRange",
".aiserver.v1.CursorRange",
".aiserver.v1.Diagnostic",
".aiserver.v1.BM25Chunk",
".aiserver.v1.CurrentFileInfo",
".aiserver.v1.DataframeInfo",
".aiserver.v1.LinterError",
".aiserver.v1.LinterErrors",
".aiserver.v1.LspSubgraphPosition",
".aiserver.v1.LspSubgraphRange",
".aiserver.v1.LspSubgraphContextItem",
".aiserver.v1.LspSubgraphFullContext",
".aiserver.v1.FSUploadFileRequest",
".aiserver.v1.FilesyncUpdateWithModelVersion",
".aiserver.v1.SingleUpdateRequest",
".aiserver.v1.FSSyncFileRequest",
".aiserver.v1.CppIntentInfo",
".aiserver.v1.LspSuggestion",
".aiserver.v1.LspSuggestedItems",
".aiserver.v1.StreamCppRequest",
".aiserver.v1.CppConfigRequest",
".aiserver.v1.AdditionalFile",
".aiserver.v1.AvailableCppModelsRequest",
".aiserver.v1.CppFileDiffHistory",
".aiserver.v1.CppContextItem",
".aiserver.v1.CppParameterHint",
".aiserver.v1.IRange",
".aiserver.v1.BlockDiffPatch",
".aiserver.v1.AvailableModelsRequest",
],
attributes: [
"#[derive(::serde::Deserialize)]",
]
}
proto_attributes! {
config: config,
paths: &[
".aiserver.v1.LineRange",
".aiserver.v1.FSUploadErrorType",
".aiserver.v1.FSSyncErrorType",
".aiserver.v1.FSUploadFileResponse",
".aiserver.v1.FSSyncFileResponse",
".aiserver.v1.StreamCppResponse",
".aiserver.v1.CppConfigResponse",
".aiserver.v1.AvailableCppModelsResponse",
".aiserver.v1.AvailableModelsResponse",
],
attributes: [
"#[derive(::serde::Serialize)]",
]
}
config
.compile_protos(
&["src/core/aiserver/v1/lite.proto"],
&["src/core/aiserver/v1/"],
)
.unwrap();
config
.compile_protos(&["src/core/config/key.proto"], &["src/core/config/"])
.unwrap();
}
// Static asset handling
println!("cargo:rerun-if-changed=scripts/minify.js");
@@ -266,13 +303,15 @@ fn main() -> Result<()> {
println!("cargo:rerun-if-changed=static/build_key.html");
println!("cargo:rerun-if-changed=static/config.html");
println!("cargo:rerun-if-changed=static/logs.html");
println!("cargo:rerun-if-changed=static/proxies.html");
println!("cargo:rerun-if-changed=static/shared-styles.css");
println!("cargo:rerun-if-changed=static/shared.js");
println!("cargo:rerun-if-changed=static/tokens.html");
println!("cargo:rerun-if-changed=README.md");
// Only watch the VERSION file for changes in release mode
#[cfg(not(debug_assertions))]
#[cfg(feature = "__preview")]
println!("cargo:rerun-if-changed=VERSION");
#[cfg(not(any(feature = "use-minified")))]
@@ -284,5 +323,8 @@ fn main() -> Result<()> {
minify_assets()?;
}
// Generate the build info file
generate_build_info()?;
Ok(())
}

147
build_info.rs Normal file
View File

@@ -0,0 +1,147 @@
/**
* Version bump function
* Reads the number in the VERSION file, adds 1, then writes it back
* If the VERSION file is missing or empty, counting starts from 1
* Runs only in release mode; completely skipped in debug/dev mode
*/
#[cfg(not(debug_assertions))]
#[cfg(feature = "__preview")]
fn update_version() -> Result<()> {
let version_path = "VERSION";
// The VERSION file is already watched in main; no need to repeat it here
// Read the current version number
let mut version = String::new();
let mut file = match File::open(version_path) {
Ok(file) => file,
Err(_) => {
// If the file does not exist or cannot be opened, start from 1
println!("cargo:warning=VERSION file not found, creating with initial value 1");
let mut new_file = File::create(version_path)?;
new_file.write_all(b"1")?;
return Ok(());
}
};
file.read_to_string(&mut version)?;
// Make sure the version number is a valid integer
let version_num = match version.trim().parse::<u64>() {
Ok(num) => num,
Err(_) => {
println!("cargo:warning=Invalid version number in VERSION file. Setting to 1.");
let mut file = File::create(version_path)?;
file.write_all(b"1")?;
return Ok(());
}
};
// Increment the version number by 1
let new_version = version_num + 1;
println!(
"cargo:warning=Release build - bumping version from {} to {}",
version_num, new_version
);
// Write it back to the file
let mut file = File::create(version_path)?;
file.write_all(new_version.to_string().as_bytes())?;
Ok(())
}
#[cfg(feature = "__preview")]
fn read_version_number() -> Result<u64> {
let mut version = String::with_capacity(4);
match std::fs::File::open("VERSION") {
Ok(mut file) => {
use std::io::Read as _;
file.read_to_string(&mut version)?;
Ok(version.trim().parse().unwrap_or(1))
}
Err(_) => Ok(1),
}
}
fn generate_build_info() -> Result<()> {
// let out_dir = std::env::var("OUT_DIR").unwrap();
// let dest_path = Path::new(out_dir).join("build_info.rs");
#[cfg(debug_assertions)]
let out_dir = "target/debug/build/build_info.rs";
#[cfg(not(debug_assertions))]
let out_dir = "target/release/build/build_info.rs";
let dest_path = Path::new(out_dir);
if dest_path.is_file() {
return Ok(());
}
let build_timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.unwrap()
.as_secs();
let build_timestamp_str = chrono::DateTime::from_timestamp(build_timestamp as i64, 0)
.unwrap()
.to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
let pkg_version = env!("CARGO_PKG_VERSION");
#[cfg(feature = "__preview")]
let (version_str, build_version_str) = {
let build_num = read_version_number()?;
(
format!("{pkg_version}+build.{build_num}"),
format!("pub const BUILD_VERSION: u32 = {build_num};\n"),
)
};
#[cfg(not(feature = "__preview"))]
let (version_str, build_version_str) = (pkg_version, "");
let build_info_content = format!(
r#"// 此文件由 build.rs 自动生成,请勿手动修改
{build_version_str}pub const BUILD_TIMESTAMP: &'static str = {build_timestamp_str:?};
pub const VERSION: &'static str = {version_str:?};
pub const IS_PRERELEASE: bool = {is_prerelease};
pub const IS_DEBUG: bool = {is_debug};
#[cfg(unix)]
pub const BUILD_EPOCH: std::time::SystemTime = unsafe {{
#[repr(C)]
struct UnixSystemTime {{
tv_sec: i64,
tv_nsec: u32,
}}
::core::mem::transmute(UnixSystemTime {{
tv_sec: {build_timestamp},
tv_nsec: 0,
}})
}};
#[cfg(windows)]
pub const BUILD_EPOCH: std::time::SystemTime = unsafe {{
#[repr(C)]
struct WindowsFileTime {{
dw_low_date_time: u32,
dw_high_date_time: u32,
}}
const INTERVALS_PER_SEC: u64 = 10_000_000;
const INTERVALS_TO_UNIX_EPOCH: u64 = 11_644_473_600 * INTERVALS_PER_SEC;
const TARGET_INTERVALS: u64 = INTERVALS_TO_UNIX_EPOCH + {build_timestamp} * INTERVALS_PER_SEC;
::core::mem::transmute(WindowsFileTime {{
dw_low_date_time: TARGET_INTERVALS as u32,
dw_high_date_time: (TARGET_INTERVALS >> 32) as u32,
}})
}};
"#,
is_prerelease = cfg!(feature = "__preview"),
is_debug = cfg!(debug_assertions),
);
std::fs::write(dest_path, build_info_content)?;
Ok(())
}

View File

@@ -0,0 +1,33 @@
# Parser settings.
cff-version: 1.2.0
message: Please cite this crate using these information.
# Version information.
date-released: 2025-02-26
version: 0.4.41
# Project information.
abstract: Date and time library for Rust
authors:
- alias: quodlibetor
family-names: Maister
given-names: Brandon W.
- alias: djc
family-names: Ochtman
given-names: Dirkjan
- alias: lifthrasiir
family-names: Seonghoon
given-names: Kang
- alias: esheppa
family-names: Sheppard
given-names: Eric
- alias: pitdicker
family-names: Dicker
given-names: Paul
license:
- Apache-2.0
- MIT
repository-artifact: https://crates.io/crates/chrono
repository-code: https://github.com/chronotope/chrono
title: chrono
url: https://docs.rs/chrono

View File

@@ -0,0 +1,73 @@
[package]
name = "chrono"
version = "0.4.41"
description = "Date and time library for Rust"
homepage = "https://github.com/chronotope/chrono"
documentation = "https://docs.rs/chrono/"
repository = "https://github.com/chronotope/chrono"
keywords = ["date", "time", "calendar"]
categories = ["date-and-time"]
readme = "README.md"
license = "MIT OR Apache-2.0"
include = ["src/*", "tests/*.rs", "LICENSE.txt", "CITATION.cff"]
edition = "2021"
rust-version = "1.62.0"
[lib]
name = "chrono"
[features]
# Don't forget to adjust `ALL_NON_EXCLUSIVE_FEATURES` in CI scripts when adding a feature or an optional dependency.
default = ["clock", "std", "oldtime", "wasmbind"]
alloc = []
libc = []
winapi = ["windows-link"]
std = ["alloc"]
clock = ["winapi", "iana-time-zone", "now"]
now = ["std"]
oldtime = []
wasmbind = ["wasm-bindgen", "js-sys"]
unstable-locales = ["pure-rust-locales"]
# Note that rkyv-16, rkyv-32, and rkyv-64 are mutually exclusive.
rkyv-16 = ["dep:rkyv", "rkyv?/pointer_width_16"]
rkyv-32 = ["dep:rkyv", "rkyv?/pointer_width_32"]
rkyv-64 = ["dep:rkyv", "rkyv?/pointer_width_64"]
rkyv-validation = ["rkyv?/bytecheck"]
# Features for internal use only:
__internal_bench = []
[dependencies]
num-traits = { version = "0.2", default-features = false }
serde = { version = "1.0.99", default-features = false, optional = true }
pure-rust-locales = { version = "0.8", optional = true }
rkyv = { version = "0.8.10", optional = true, default-features = false, features = ["std"]}
arbitrary = { version = "1.0.0", features = ["derive"], optional = true }
[target.'cfg(all(target_arch = "wasm32", not(any(target_os = "emscripten", target_os = "wasi"))))'.dependencies]
wasm-bindgen = { version = "0.2", optional = true }
js-sys = { version = "0.3", optional = true } # contains FFI bindings for the JS Date API
[target.'cfg(windows)'.dependencies]
windows-link = { version = "0.1", optional = true }
[target.'cfg(windows)'.dev-dependencies]
windows-bindgen = { version = "0.62" } # MSRV is 1.74
[target.'cfg(unix)'.dependencies]
iana-time-zone = { version = "0.1.45", optional = true, features = ["fallback"] }
[dev-dependencies]
serde_json = { version = "1" }
serde_derive = { version = "1", default-features = false }
similar-asserts = { version = "1.6.1" }
bincode = { version = "1.3.0" }
[target.'cfg(all(target_arch = "wasm32", not(any(target_os = "emscripten", target_os = "wasi"))))'.dev-dependencies]
wasm-bindgen-test = "0.3"
[package.metadata.docs.rs]
features = ["arbitrary", "rkyv", "serde", "unstable-locales"]
rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.playground]
features = ["serde"]

View File

@@ -0,0 +1,240 @@
Rust-chrono is dual-licensed under The MIT License [1] and
Apache 2.0 License [2]. Copyright (c) 2014--2025, Kang Seonghoon and
contributors.
Nota Bene: This is same as the Rust Project's own license.
[1]: <http://opensource.org/licenses/MIT>, which is reproduced below:
~~~~
The MIT License (MIT)
Copyright (c) 2014, Kang Seonghoon.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
~~~~
[2]: <http://www.apache.org/licenses/LICENSE-2.0>, which is reproduced below:
~~~~
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
~~~~

View File

@@ -0,0 +1,92 @@
[Chrono][docsrs]: Timezone-aware date and time handling
========================================
[![Chrono GitHub Actions][gh-image]][gh-checks]
[![Chrono on crates.io][cratesio-image]][cratesio]
[![Chrono on docs.rs][docsrs-image]][docsrs]
[![Chat][discord-image]][discord]
[![codecov.io][codecov-img]][codecov-link]
[gh-image]: https://github.com/chronotope/chrono/actions/workflows/test.yml/badge.svg?branch=main
[gh-checks]: https://github.com/chronotope/chrono/actions/workflows/test.yml?query=branch%3Amain
[cratesio-image]: https://img.shields.io/crates/v/chrono.svg
[cratesio]: https://crates.io/crates/chrono
[docsrs-image]: https://docs.rs/chrono/badge.svg
[docsrs]: https://docs.rs/chrono
[discord-image]: https://img.shields.io/discord/976380008299917365?logo=discord
[discord]: https://discord.gg/sXpav4PS7M
[codecov-img]: https://img.shields.io/codecov/c/github/chronotope/chrono?logo=codecov
[codecov-link]: https://codecov.io/gh/chronotope/chrono
Chrono aims to provide all functionality needed to do correct operations on dates and times in the
[proleptic Gregorian calendar](https://en.wikipedia.org/wiki/Proleptic_Gregorian_calendar):
* The [`DateTime`](https://docs.rs/chrono/latest/chrono/struct.DateTime.html) type is timezone-aware
by default, with separate timezone-naive types.
* Operations that may produce an invalid or ambiguous date and time return `Option` or
[`MappedLocalTime`](https://docs.rs/chrono/latest/chrono/offset/enum.MappedLocalTime.html).
* Configurable parsing and formatting with an `strftime` inspired date and time formatting syntax.
* The [`Local`](https://docs.rs/chrono/latest/chrono/offset/struct.Local.html) timezone works with
the current timezone of the OS.
* Types and operations are implemented to be reasonably efficient.
Timezone data is not shipped with chrono by default to limit binary sizes. Use the companion crate
[Chrono-TZ](https://crates.io/crates/chrono-tz) or [`tzfile`](https://crates.io/crates/tzfile) for
full timezone support.
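A brief illustration of the points above, assuming the default `clock` and `std` features:

```rust
use chrono::{DateTime, FixedOffset, TimeZone, Utc};

fn main() {
    // Timezone-aware by default: Utc::now() yields a DateTime<Utc>.
    let now: DateTime<Utc> = Utc::now();
    println!("{}", now.to_rfc3339());

    // Constructing a local time returns a MappedLocalTime, because the
    // result may be invalid or ambiguous for some offsets.
    let tz = FixedOffset::east_opt(8 * 3600).expect("valid offset");
    let dt = tz
        .with_ymd_and_hms(2025, 7, 27, 8, 46, 5)
        .single()
        .expect("unambiguous local time");

    // strftime-inspired formatting.
    println!("{}", dt.format("%Y-%m-%d %H:%M:%S %z"));
}
```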
## Documentation
See [docs.rs](https://docs.rs/chrono/latest/chrono/) for the API reference.
## Limitations
* Only the proleptic Gregorian calendar (i.e. extended to support older dates) is supported.
* Date types are limited to about +/- 262,000 years from the common epoch.
* Time types are limited to nanosecond accuracy.
* Leap seconds can be represented, but Chrono does not fully support them.
See [Leap Second Handling](https://docs.rs/chrono/latest/chrono/naive/struct.NaiveTime.html#leap-second-handling).
## Crate features
Default features:
* `alloc`: Enable features that depend on allocation (primarily string formatting).
* `std`: Enables functionality that depends on the standard library. This is a superset of `alloc`
and adds interoperation with standard library types and traits.
* `clock`: Enables reading the local timezone (`Local`). This is a superset of `now`.
* `now`: Enables reading the system time (`now`).
* `wasmbind`: Interface with the JS Date API for the `wasm32` target.
Optional features:
* `serde`: Enable serialization/deserialization via [serde].
* `rkyv`: Deprecated, use the `rkyv-*` features.
* `rkyv-16`: Enable serialization/deserialization via [rkyv], using 16-bit integers for integral `*size` types.
* `rkyv-32`: Enable serialization/deserialization via [rkyv], using 32-bit integers for integral `*size` types.
* `rkyv-64`: Enable serialization/deserialization via [rkyv], using 64-bit integers for integral `*size` types.
* `rkyv-validation`: Enable rkyv validation support using `bytecheck`.
* `arbitrary`: Construct arbitrary instances of a type with the Arbitrary crate.
* `unstable-locales`: Enable localization. This adds various methods with a `_localized` suffix.
The implementation and API may change or even be removed in a patch release. Feedback welcome.
* `oldtime`: This feature no longer has any effect; it used to offer compatibility with the `time` 0.1 crate.
Note: The `rkyv{,-16,-32,-64}` features are mutually exclusive.
[serde]: https://github.com/serde-rs/serde
[rkyv]: https://github.com/rkyv/rkyv
## Rust version requirements
The Minimum Supported Rust Version (MSRV) is currently **Rust 1.61.0**.
The MSRV is explicitly tested in CI. It may be bumped in minor releases, but this is not done
lightly.
## License
This project is licensed under either of
* [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0)
* [MIT License](https://opensource.org/licenses/MIT)
at your option.

View File

@@ -0,0 +1,663 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! ISO 8601 calendar date with time zone.
#![allow(deprecated)]
#[cfg(feature = "alloc")]
use core::borrow::Borrow;
use core::cmp::Ordering;
use core::ops::{Add, AddAssign, Sub, SubAssign};
use core::{fmt, hash};
// #[cfg(feature = "rkyv")]
// use rkyv::{Archive, Deserialize, Serialize};
#[cfg(all(feature = "unstable-locales", feature = "alloc"))]
use crate::format::Locale;
#[cfg(feature = "alloc")]
use crate::format::{DelayedFormat, Item, StrftimeItems};
use crate::naive::{IsoWeek, NaiveDate, NaiveTime};
use crate::offset::{TimeZone, Utc};
use crate::{DateTime, Datelike, TimeDelta, Weekday};
/// ISO 8601 calendar date with time zone.
///
/// You almost certainly want to be using a [`NaiveDate`] instead of this type.
///
/// This type primarily exists to aid in the construction of DateTimes that
/// have a timezone by way of the [`TimeZone`] datelike constructors (e.g.
/// [`TimeZone::ymd`]).
///
/// This type should be considered ambiguous at best, due to the inherent lack
/// of precision required for the time zone resolution.
///
/// There are some guarantees on the usage of `Date<Tz>`:
///
/// - If properly constructed via [`TimeZone::ymd`] and others without an error,
/// the corresponding local date should exist for at least a moment.
/// (It may still have a gap from the offset changes.)
///
/// - The `TimeZone` is free to assign *any* [`Offset`](crate::offset::Offset) to the
/// local date, as long as that offset did occur in given day.
///
/// For example, if `2015-03-08T01:59-08:00` is followed by `2015-03-08T03:00-07:00`,
/// it may produce either `2015-03-08-08:00` or `2015-03-08-07:00`
/// but *not* `2015-03-08+00:00` and others.
///
/// - Once constructed as a full `DateTime`, [`DateTime::date`] and other associated
/// methods should return those for the original `Date`. For example, if `dt =
/// tz.ymd_opt(y,m,d).unwrap().hms(h,n,s)` were valid, `dt.date() == tz.ymd_opt(y,m,d).unwrap()`.
///
/// - The date is timezone-agnostic up to one day (i.e. practically always),
/// so the local date and UTC date should be equal for most cases
/// even though the raw calculation between `NaiveDate` and `TimeDelta` may not.
#[deprecated(since = "0.4.23", note = "Use `NaiveDate` or `DateTime<Tz>` instead")]
#[derive(Clone)]
// #[cfg_attr(feature = "rkyv", derive(Archive, Deserialize, Serialize))]
pub struct Date<Tz: TimeZone> {
date: NaiveDate,
offset: Tz::Offset,
}
/// The minimum possible `Date`.
#[allow(deprecated)]
#[deprecated(since = "0.4.20", note = "Use Date::MIN_UTC instead")]
pub const MIN_DATE: Date<Utc> = Date::<Utc>::MIN_UTC;
/// The maximum possible `Date`.
#[allow(deprecated)]
#[deprecated(since = "0.4.20", note = "Use Date::MAX_UTC instead")]
pub const MAX_DATE: Date<Utc> = Date::<Utc>::MAX_UTC;
impl<Tz: TimeZone> Date<Tz> {
/// Makes a new `Date` with given *UTC* date and offset.
/// The local date should be constructed via the `TimeZone` trait.
#[inline]
#[must_use]
pub fn from_utc(date: NaiveDate, offset: Tz::Offset) -> Date<Tz> {
Date { date, offset }
}
/// Makes a new `DateTime` from the current date and given `NaiveTime`.
/// The offset in the current date is preserved.
///
/// Returns `None` on invalid datetime.
#[inline]
#[must_use]
pub fn and_time(&self, time: NaiveTime) -> Option<DateTime<Tz>> {
let localdt = self.naive_local().and_time(time);
self.timezone().from_local_datetime(&localdt).single()
}
/// Makes a new `DateTime` from the current date, hour, minute and second.
/// The offset in the current date is preserved.
///
/// Panics on invalid hour, minute and/or second.
#[deprecated(since = "0.4.23", note = "Use and_hms_opt() instead")]
#[inline]
#[must_use]
pub fn and_hms(&self, hour: u32, min: u32, sec: u32) -> DateTime<Tz> {
self.and_hms_opt(hour, min, sec).expect("invalid time")
}
/// Makes a new `DateTime` from the current date, hour, minute and second.
/// The offset in the current date is preserved.
///
/// Returns `None` on invalid hour, minute and/or second.
#[inline]
#[must_use]
pub fn and_hms_opt(&self, hour: u32, min: u32, sec: u32) -> Option<DateTime<Tz>> {
NaiveTime::from_hms_opt(hour, min, sec).and_then(|time| self.and_time(time))
}
/// Makes a new `DateTime` from the current date, hour, minute, second and millisecond.
/// The millisecond part can exceed 1,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Panics on invalid hour, minute, second and/or millisecond.
#[deprecated(since = "0.4.23", note = "Use and_hms_milli_opt() instead")]
#[inline]
#[must_use]
pub fn and_hms_milli(&self, hour: u32, min: u32, sec: u32, milli: u32) -> DateTime<Tz> {
self.and_hms_milli_opt(hour, min, sec, milli).expect("invalid time")
}
/// Makes a new `DateTime` from the current date, hour, minute, second and millisecond.
/// The millisecond part can exceed 1,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Returns `None` on invalid hour, minute, second and/or millisecond.
#[inline]
#[must_use]
pub fn and_hms_milli_opt(
&self,
hour: u32,
min: u32,
sec: u32,
milli: u32,
) -> Option<DateTime<Tz>> {
NaiveTime::from_hms_milli_opt(hour, min, sec, milli).and_then(|time| self.and_time(time))
}
/// Makes a new `DateTime` from the current date, hour, minute, second and microsecond.
/// The microsecond part can exceed 1,000,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Panics on invalid hour, minute, second and/or microsecond.
#[deprecated(since = "0.4.23", note = "Use and_hms_micro_opt() instead")]
#[inline]
#[must_use]
pub fn and_hms_micro(&self, hour: u32, min: u32, sec: u32, micro: u32) -> DateTime<Tz> {
self.and_hms_micro_opt(hour, min, sec, micro).expect("invalid time")
}
/// Makes a new `DateTime` from the current date, hour, minute, second and microsecond.
/// The microsecond part can exceed 1,000,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Returns `None` on invalid hour, minute, second and/or microsecond.
#[inline]
#[must_use]
pub fn and_hms_micro_opt(
&self,
hour: u32,
min: u32,
sec: u32,
micro: u32,
) -> Option<DateTime<Tz>> {
NaiveTime::from_hms_micro_opt(hour, min, sec, micro).and_then(|time| self.and_time(time))
}
/// Makes a new `DateTime` from the current date, hour, minute, second and nanosecond.
/// The nanosecond part can exceed 1,000,000,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Panics on invalid hour, minute, second and/or nanosecond.
#[deprecated(since = "0.4.23", note = "Use and_hms_nano_opt() instead")]
#[inline]
#[must_use]
pub fn and_hms_nano(&self, hour: u32, min: u32, sec: u32, nano: u32) -> DateTime<Tz> {
self.and_hms_nano_opt(hour, min, sec, nano).expect("invalid time")
}
/// Makes a new `DateTime` from the current date, hour, minute, second and nanosecond.
/// The nanosecond part can exceed 1,000,000,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Returns `None` on invalid hour, minute, second and/or nanosecond.
#[inline]
#[must_use]
pub fn and_hms_nano_opt(
&self,
hour: u32,
min: u32,
sec: u32,
nano: u32,
) -> Option<DateTime<Tz>> {
NaiveTime::from_hms_nano_opt(hour, min, sec, nano).and_then(|time| self.and_time(time))
}
/// Makes a new `Date` for the next date.
///
/// Panics when `self` is the last representable date.
#[deprecated(since = "0.4.23", note = "Use succ_opt() instead")]
#[inline]
#[must_use]
pub fn succ(&self) -> Date<Tz> {
self.succ_opt().expect("out of bound")
}
/// Makes a new `Date` for the next date.
///
/// Returns `None` when `self` is the last representable date.
#[inline]
#[must_use]
pub fn succ_opt(&self) -> Option<Date<Tz>> {
self.date.succ_opt().map(|date| Date::from_utc(date, self.offset.clone()))
}
/// Makes a new `Date` for the prior date.
///
/// Panics when `self` is the first representable date.
#[deprecated(since = "0.4.23", note = "Use pred_opt() instead")]
#[inline]
#[must_use]
pub fn pred(&self) -> Date<Tz> {
self.pred_opt().expect("out of bound")
}
/// Makes a new `Date` for the prior date.
///
/// Returns `None` when `self` is the first representable date.
#[inline]
#[must_use]
pub fn pred_opt(&self) -> Option<Date<Tz>> {
self.date.pred_opt().map(|date| Date::from_utc(date, self.offset.clone()))
}
/// Retrieves an associated offset from UTC.
#[inline]
#[must_use]
pub fn offset(&self) -> &Tz::Offset {
&self.offset
}
/// Retrieves an associated time zone.
#[inline]
#[must_use]
pub fn timezone(&self) -> Tz {
TimeZone::from_offset(&self.offset)
}
/// Changes the associated time zone.
/// This does not change the actual `Date` (but will change the string representation).
#[inline]
#[must_use]
pub fn with_timezone<Tz2: TimeZone>(&self, tz: &Tz2) -> Date<Tz2> {
tz.from_utc_date(&self.date)
}
/// Adds given `TimeDelta` to the current date.
///
/// Returns `None` when it will result in overflow.
#[inline]
#[must_use]
pub fn checked_add_signed(self, rhs: TimeDelta) -> Option<Date<Tz>> {
let date = self.date.checked_add_signed(rhs)?;
Some(Date { date, offset: self.offset })
}
/// Subtracts given `TimeDelta` from the current date.
///
/// Returns `None` when it will result in overflow.
#[inline]
#[must_use]
pub fn checked_sub_signed(self, rhs: TimeDelta) -> Option<Date<Tz>> {
let date = self.date.checked_sub_signed(rhs)?;
Some(Date { date, offset: self.offset })
}
/// Subtracts another `Date` from the current date.
/// Returns a `TimeDelta` of integral numbers.
///
/// This does not overflow or underflow at all,
/// as all possible output fits in the range of `TimeDelta`.
#[inline]
#[must_use]
pub fn signed_duration_since<Tz2: TimeZone>(self, rhs: Date<Tz2>) -> TimeDelta {
self.date.signed_duration_since(rhs.date)
}
/// Returns a view to the naive UTC date.
#[inline]
#[must_use]
pub fn naive_utc(&self) -> NaiveDate {
self.date
}
/// Returns a view to the naive local date.
///
/// This is technically the same as [`naive_utc`](#method.naive_utc)
/// because the offset is restricted to never exceed one day,
/// but is provided for consistency.
#[inline]
#[must_use]
pub fn naive_local(&self) -> NaiveDate {
self.date
}
/// Returns the number of whole years from the given `base` until `self`.
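///
/// # Example
///
/// A minimal sketch (illustrative only):
///
/// ```
/// # #![allow(deprecated)]
/// use chrono::{Date, NaiveDate, Utc};
///
/// let base = Date::<Utc>::from_utc(NaiveDate::from_ymd_opt(2000, 1, 1).unwrap(), Utc);
/// let later = Date::<Utc>::from_utc(NaiveDate::from_ymd_opt(2025, 6, 30).unwrap(), Utc);
///
/// assert_eq!(later.years_since(base), Some(25));
/// // A `base` that is after `self` yields `None`.
/// assert_eq!(base.years_since(later), None);
/// ```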
#[must_use]
pub fn years_since(&self, base: Self) -> Option<u32> {
self.date.years_since(base.date)
}
/// The minimum possible `Date`.
pub const MIN_UTC: Date<Utc> = Date { date: NaiveDate::MIN, offset: Utc };
/// The maximum possible `Date`.
pub const MAX_UTC: Date<Utc> = Date { date: NaiveDate::MAX, offset: Utc };
}
/// Maps the local date to other date with given conversion function.
fn map_local<Tz: TimeZone, F>(d: &Date<Tz>, mut f: F) -> Option<Date<Tz>>
where
F: FnMut(NaiveDate) -> Option<NaiveDate>,
{
f(d.naive_local()).and_then(|date| d.timezone().from_local_date(&date).single())
}
impl<Tz: TimeZone> Date<Tz>
where
Tz::Offset: fmt::Display,
{
/// Formats the date with the specified formatting items.
#[cfg(feature = "alloc")]
#[inline]
#[must_use]
pub fn format_with_items<'a, I, B>(&self, items: I) -> DelayedFormat<I>
where
I: Iterator<Item = B> + Clone,
B: Borrow<Item<'a>>,
{
DelayedFormat::new_with_offset(Some(self.naive_local()), None, &self.offset, items)
}
/// Formats the date with the specified format string.
/// See the [`crate::format::strftime`] module
/// on the supported escape sequences.
#[cfg(feature = "alloc")]
#[inline]
#[must_use]
pub fn format<'a>(&self, fmt: &'a str) -> DelayedFormat<StrftimeItems<'a>> {
self.format_with_items(StrftimeItems::new(fmt))
}
/// Formats the date with the specified formatting items and locale.
#[cfg(all(feature = "unstable-locales", feature = "alloc"))]
#[inline]
#[must_use]
pub fn format_localized_with_items<'a, I, B>(
&self,
items: I,
locale: Locale,
) -> DelayedFormat<I>
where
I: Iterator<Item = B> + Clone,
B: Borrow<Item<'a>>,
{
DelayedFormat::new_with_offset_and_locale(
Some(self.naive_local()),
None,
&self.offset,
items,
locale,
)
}
/// Formats the date with the specified format string and locale.
/// See the [`crate::format::strftime`] module
/// on the supported escape sequences.
#[cfg(all(feature = "unstable-locales", feature = "alloc"))]
#[inline]
#[must_use]
pub fn format_localized<'a>(
&self,
fmt: &'a str,
locale: Locale,
) -> DelayedFormat<StrftimeItems<'a>> {
self.format_localized_with_items(StrftimeItems::new_with_locale(fmt, locale), locale)
}
}
impl<Tz: TimeZone> Datelike for Date<Tz> {
#[inline]
fn year(&self) -> i32 {
self.naive_local().year()
}
#[inline]
fn month(&self) -> u32 {
self.naive_local().month()
}
#[inline]
fn month0(&self) -> u32 {
self.naive_local().month0()
}
#[inline]
fn day(&self) -> u32 {
self.naive_local().day()
}
#[inline]
fn day0(&self) -> u32 {
self.naive_local().day0()
}
#[inline]
fn ordinal(&self) -> u32 {
self.naive_local().ordinal()
}
#[inline]
fn ordinal0(&self) -> u32 {
self.naive_local().ordinal0()
}
#[inline]
fn weekday(&self) -> Weekday {
self.naive_local().weekday()
}
#[inline]
fn iso_week(&self) -> IsoWeek {
self.naive_local().iso_week()
}
#[inline]
fn with_year(&self, year: i32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_year(year))
}
#[inline]
fn with_month(&self, month: u32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_month(month))
}
#[inline]
fn with_month0(&self, month0: u32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_month0(month0))
}
#[inline]
fn with_day(&self, day: u32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_day(day))
}
#[inline]
fn with_day0(&self, day0: u32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_day0(day0))
}
#[inline]
fn with_ordinal(&self, ordinal: u32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_ordinal(ordinal))
}
#[inline]
fn with_ordinal0(&self, ordinal0: u32) -> Option<Date<Tz>> {
map_local(self, |date| date.with_ordinal0(ordinal0))
}
}
// Manual impls are needed because automatic (derived) impls cannot handle the associated types.
impl<Tz: TimeZone> Copy for Date<Tz> where <Tz as TimeZone>::Offset: Copy {}
unsafe impl<Tz: TimeZone> Send for Date<Tz> where <Tz as TimeZone>::Offset: Send {}
impl<Tz: TimeZone, Tz2: TimeZone> PartialEq<Date<Tz2>> for Date<Tz> {
fn eq(&self, other: &Date<Tz2>) -> bool {
self.date == other.date
}
}
impl<Tz: TimeZone> Eq for Date<Tz> {}
impl<Tz: TimeZone> PartialOrd for Date<Tz> {
fn partial_cmp(&self, other: &Date<Tz>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<Tz: TimeZone> Ord for Date<Tz> {
fn cmp(&self, other: &Date<Tz>) -> Ordering {
self.date.cmp(&other.date)
}
}
impl<Tz: TimeZone> hash::Hash for Date<Tz> {
fn hash<H: hash::Hasher>(&self, state: &mut H) {
self.date.hash(state)
}
}
impl<Tz: TimeZone> Add<TimeDelta> for Date<Tz> {
type Output = Date<Tz>;
#[inline]
fn add(self, rhs: TimeDelta) -> Date<Tz> {
self.checked_add_signed(rhs).expect("`Date + TimeDelta` overflowed")
}
}
impl<Tz: TimeZone> AddAssign<TimeDelta> for Date<Tz> {
#[inline]
fn add_assign(&mut self, rhs: TimeDelta) {
self.date = self.date.checked_add_signed(rhs).expect("`Date + TimeDelta` overflowed");
}
}
impl<Tz: TimeZone> Sub<TimeDelta> for Date<Tz> {
type Output = Date<Tz>;
#[inline]
fn sub(self, rhs: TimeDelta) -> Date<Tz> {
self.checked_sub_signed(rhs).expect("`Date - TimeDelta` overflowed")
}
}
impl<Tz: TimeZone> SubAssign<TimeDelta> for Date<Tz> {
#[inline]
fn sub_assign(&mut self, rhs: TimeDelta) {
self.date = self.date.checked_sub_signed(rhs).expect("`Date - TimeDelta` overflowed");
}
}
impl<Tz: TimeZone> Sub<Date<Tz>> for Date<Tz> {
type Output = TimeDelta;
#[inline]
fn sub(self, rhs: Date<Tz>) -> TimeDelta {
self.signed_duration_since(rhs)
}
}
impl<Tz: TimeZone> fmt::Debug for Date<Tz> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.naive_local().fmt(f)?;
self.offset.fmt(f)
}
}
impl<Tz: TimeZone> fmt::Display for Date<Tz>
where
Tz::Offset: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.naive_local().fmt(f)?;
self.offset.fmt(f)
}
}
// Note that implementation of Arbitrary cannot be automatically derived for Date<Tz>, due to
// the nontrivial bound <Tz as TimeZone>::Offset: Arbitrary.
#[cfg(all(feature = "arbitrary", feature = "std"))]
impl<'a, Tz> arbitrary::Arbitrary<'a> for Date<Tz>
where
Tz: TimeZone,
<Tz as TimeZone>::Offset: arbitrary::Arbitrary<'a>,
{
fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Date<Tz>> {
let date = NaiveDate::arbitrary(u)?;
let offset = <Tz as TimeZone>::Offset::arbitrary(u)?;
Ok(Date::from_utc(date, offset))
}
}
#[cfg(test)]
mod tests {
use super::Date;
use crate::{FixedOffset, NaiveDate, TimeDelta, Utc};
#[cfg(feature = "clock")]
use crate::offset::{Local, TimeZone};
#[test]
#[cfg(feature = "clock")]
fn test_years_elapsed() {
const WEEKS_PER_YEAR: f32 = 52.1775;
// This is always at least one year because 1 year = 52.1775 weeks.
let one_year_ago = Utc::today() - TimeDelta::weeks((WEEKS_PER_YEAR * 1.5).ceil() as i64);
// A bit more than 2 years.
let two_year_ago = Utc::today() - TimeDelta::weeks((WEEKS_PER_YEAR * 2.5).ceil() as i64);
assert_eq!(Utc::today().years_since(one_year_ago), Some(1));
assert_eq!(Utc::today().years_since(two_year_ago), Some(2));
// If the given date is later than now, the function returns `None`.
let future = Utc::today() + TimeDelta::weeks(12);
assert_eq!(Utc::today().years_since(future), None);
}
#[test]
fn test_date_add_assign() {
let naivedate = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap();
let date = Date::<Utc>::from_utc(naivedate, Utc);
let mut date_add = date;
date_add += TimeDelta::days(5);
assert_eq!(date_add, date + TimeDelta::days(5));
let timezone = FixedOffset::east_opt(60 * 60).unwrap();
let date = date.with_timezone(&timezone);
let date_add = date_add.with_timezone(&timezone);
assert_eq!(date_add, date + TimeDelta::days(5));
let timezone = FixedOffset::west_opt(2 * 60 * 60).unwrap();
let date = date.with_timezone(&timezone);
let date_add = date_add.with_timezone(&timezone);
assert_eq!(date_add, date + TimeDelta::days(5));
}
#[test]
#[cfg(feature = "clock")]
fn test_date_add_assign_local() {
let naivedate = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap();
let date = Local.from_utc_date(&naivedate);
let mut date_add = date;
date_add += TimeDelta::days(5);
assert_eq!(date_add, date + TimeDelta::days(5));
}
#[test]
fn test_date_sub_assign() {
let naivedate = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap();
let date = Date::<Utc>::from_utc(naivedate, Utc);
let mut date_sub = date;
date_sub -= TimeDelta::days(5);
assert_eq!(date_sub, date - TimeDelta::days(5));
let timezone = FixedOffset::east_opt(60 * 60).unwrap();
let date = date.with_timezone(&timezone);
let date_sub = date_sub.with_timezone(&timezone);
assert_eq!(date_sub, date - TimeDelta::days(5));
let timezone = FixedOffset::west_opt(2 * 60 * 60).unwrap();
let date = date.with_timezone(&timezone);
let date_sub = date_sub.with_timezone(&timezone);
assert_eq!(date_sub, date - TimeDelta::days(5));
}
#[test]
#[cfg(feature = "clock")]
fn test_date_sub_assign_local() {
let naivedate = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap();
let date = Local.from_utc_date(&naivedate);
let mut date_sub = date;
date_sub -= TimeDelta::days(5);
assert_eq!(date_sub, date - TimeDelta::days(5));
}
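    // Illustrative addition (not part of the upstream test suite): subtracting two `Date`s
    // produces a whole-day `TimeDelta`, matching `signed_duration_since`.
    #[test]
    fn test_date_signed_duration_since() {
        let d1 = Date::<Utc>::from_utc(NaiveDate::from_ymd_opt(2000, 1, 10).unwrap(), Utc);
        let d2 = Date::<Utc>::from_utc(NaiveDate::from_ymd_opt(2000, 1, 1).unwrap(), Utc);
        assert_eq!(d1.signed_duration_since(d2), TimeDelta::days(9));
        assert_eq!(d2 - d1, TimeDelta::days(-9));
    }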
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,945 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! Date and time formatting routines.
#[cfg(all(feature = "alloc", not(feature = "std"), not(test)))]
use alloc::string::{String, ToString};
#[cfg(feature = "alloc")]
use core::borrow::Borrow;
#[cfg(feature = "alloc")]
use core::fmt::Display;
use core::fmt::{self, Write};
#[cfg(feature = "alloc")]
use crate::offset::Offset;
#[cfg(any(feature = "alloc", feature = "serde"))]
use crate::{Datelike, FixedOffset, NaiveDateTime, Timelike};
#[cfg(feature = "alloc")]
use crate::{NaiveDate, NaiveTime, Weekday};
#[cfg(feature = "alloc")]
use super::locales;
#[cfg(any(feature = "alloc", feature = "serde"))]
use super::{Colons, OffsetFormat, OffsetPrecision, Pad};
#[cfg(feature = "alloc")]
use super::{Fixed, InternalFixed, InternalInternal, Item, Numeric};
#[cfg(feature = "alloc")]
use locales::*;
/// A *temporary* object which can be used as an argument to `format!` or others.
/// This is normally constructed via `format` methods of each date and time type.
#[cfg(feature = "alloc")]
#[derive(Debug)]
pub struct DelayedFormat<I> {
/// The date view, if any.
date: Option<NaiveDate>,
/// The time view, if any.
time: Option<NaiveTime>,
/// The name and local-to-UTC difference for the offset (timezone), if any.
off: Option<(String, FixedOffset)>,
/// An iterator returning formatting items.
items: I,
/// Locale used for text.
/// ZST if the `unstable-locales` feature is not enabled.
locale: Locale,
}
#[cfg(feature = "alloc")]
impl<'a, I: Iterator<Item = B> + Clone, B: Borrow<Item<'a>>> DelayedFormat<I> {
/// Makes a new `DelayedFormat` value out of local date and time.
#[must_use]
pub fn new(date: Option<NaiveDate>, time: Option<NaiveTime>, items: I) -> DelayedFormat<I> {
DelayedFormat { date, time, off: None, items, locale: default_locale() }
}
/// Makes a new `DelayedFormat` value out of local date and time and UTC offset.
#[must_use]
pub fn new_with_offset<Off>(
date: Option<NaiveDate>,
time: Option<NaiveTime>,
offset: &Off,
items: I,
) -> DelayedFormat<I>
where
Off: Offset + Display,
{
let name_and_diff = (offset.to_string(), offset.fix());
DelayedFormat { date, time, off: Some(name_and_diff), items, locale: default_locale() }
}
/// Makes a new `DelayedFormat` value out of local date and time and locale.
#[cfg(feature = "unstable-locales")]
#[must_use]
pub fn new_with_locale(
date: Option<NaiveDate>,
time: Option<NaiveTime>,
items: I,
locale: Locale,
) -> DelayedFormat<I> {
DelayedFormat { date, time, off: None, items, locale }
}
/// Makes a new `DelayedFormat` value out of local date and time, UTC offset and locale.
#[cfg(feature = "unstable-locales")]
#[must_use]
pub fn new_with_offset_and_locale<Off>(
date: Option<NaiveDate>,
time: Option<NaiveTime>,
offset: &Off,
items: I,
locale: Locale,
) -> DelayedFormat<I>
where
Off: Offset + Display,
{
let name_and_diff = (offset.to_string(), offset.fix());
DelayedFormat { date, time, off: Some(name_and_diff), items, locale }
}
/// Formats `DelayedFormat` into a `core::fmt::Write` instance.
/// # Errors
/// This function returns a `core::fmt::Error` if formatting into the `core::fmt::Write` instance fails.
///
/// # Example
/// ### Writing to a String
/// ```
/// let dt = chrono::DateTime::from_timestamp(1643723400, 123456789).unwrap();
/// let df = dt.format("%Y-%m-%d %H:%M:%S%.9f");
/// let mut buffer = String::new();
/// let _ = df.write_to(&mut buffer);
/// ```
pub fn write_to(&self, w: &mut impl Write) -> fmt::Result {
for item in self.items.clone() {
match *item.borrow() {
Item::Literal(s) | Item::Space(s) => w.write_str(s),
#[cfg(feature = "alloc")]
Item::OwnedLiteral(ref s) | Item::OwnedSpace(ref s) => w.write_str(s),
Item::Numeric(ref spec, pad) => self.format_numeric(w, spec, pad),
Item::Fixed(ref spec) => self.format_fixed(w, spec),
Item::Error => Err(fmt::Error),
}?;
}
Ok(())
}
#[cfg(feature = "alloc")]
fn format_numeric(&self, w: &mut impl Write, spec: &Numeric, pad: Pad) -> fmt::Result {
use self::Numeric::*;
fn write_one(w: &mut impl Write, v: u8) -> fmt::Result {
w.write_char((b'0' + v) as char)
}
fn write_two(w: &mut impl Write, v: u8, pad: Pad) -> fmt::Result {
let ones = b'0' + v % 10;
match (v / 10, pad) {
(0, Pad::None) => {}
(0, Pad::Space) => w.write_char(' ')?,
(tens, _) => w.write_char((b'0' + tens) as char)?,
}
w.write_char(ones as char)
}
#[inline]
fn write_year(w: &mut impl Write, year: i32, pad: Pad) -> fmt::Result {
if (1000..=9999).contains(&year) {
// fast path
write_hundreds(w, (year / 100) as u8)?;
write_hundreds(w, (year % 100) as u8)
} else {
write_n(w, 4, year as i64, pad, !(0..10_000).contains(&year))
}
}
fn write_n(
w: &mut impl Write,
n: usize,
v: i64,
pad: Pad,
always_sign: bool,
) -> fmt::Result {
if always_sign {
match pad {
Pad::None => write!(w, "{v:+}"),
Pad::Zero => write!(w, "{:+01$}", v, n + 1),
Pad::Space => write!(w, "{:+1$}", v, n + 1),
}
} else {
match pad {
Pad::None => write!(w, "{v}"),
Pad::Zero => write!(w, "{v:0n$}"),
Pad::Space => write!(w, "{v:n$}"),
}
}
}
match (spec, self.date, self.time) {
(Year, Some(d), _) => write_year(w, d.year(), pad),
(YearDiv100, Some(d), _) => write_two(w, d.year().div_euclid(100) as u8, pad),
(YearMod100, Some(d), _) => write_two(w, d.year().rem_euclid(100) as u8, pad),
(IsoYear, Some(d), _) => write_year(w, d.iso_week().year(), pad),
(IsoYearDiv100, Some(d), _) => {
write_two(w, d.iso_week().year().div_euclid(100) as u8, pad)
}
(IsoYearMod100, Some(d), _) => {
write_two(w, d.iso_week().year().rem_euclid(100) as u8, pad)
}
(Quarter, Some(d), _) => write_one(w, d.quarter() as u8),
(Month, Some(d), _) => write_two(w, d.month() as u8, pad),
(Day, Some(d), _) => write_two(w, d.day() as u8, pad),
(WeekFromSun, Some(d), _) => write_two(w, d.weeks_from(Weekday::Sun) as u8, pad),
(WeekFromMon, Some(d), _) => write_two(w, d.weeks_from(Weekday::Mon) as u8, pad),
(IsoWeek, Some(d), _) => write_two(w, d.iso_week().week() as u8, pad),
(NumDaysFromSun, Some(d), _) => write_one(w, d.weekday().num_days_from_sunday() as u8),
(WeekdayFromMon, Some(d), _) => write_one(w, d.weekday().number_from_monday() as u8),
(Ordinal, Some(d), _) => write_n(w, 3, d.ordinal() as i64, pad, false),
(Hour, _, Some(t)) => write_two(w, t.hour() as u8, pad),
(Hour12, _, Some(t)) => write_two(w, t.hour12().1 as u8, pad),
(Minute, _, Some(t)) => write_two(w, t.minute() as u8, pad),
(Second, _, Some(t)) => {
write_two(w, (t.second() + t.nanosecond() / 1_000_000_000) as u8, pad)
}
(Nanosecond, _, Some(t)) => {
write_n(w, 9, (t.nanosecond() % 1_000_000_000) as i64, pad, false)
}
(Timestamp, Some(d), Some(t)) => {
let offset = self.off.as_ref().map(|(_, o)| i64::from(o.local_minus_utc()));
let timestamp = d.and_time(t).and_utc().timestamp() - offset.unwrap_or(0);
write_n(w, 9, timestamp, pad, false)
}
(Internal(_), _, _) => Ok(()), // for future expansion
_ => Err(fmt::Error), // insufficient arguments for given format
}
}
#[cfg(feature = "alloc")]
fn format_fixed(&self, w: &mut impl Write, spec: &Fixed) -> fmt::Result {
use Fixed::*;
use InternalInternal::*;
match (spec, self.date, self.time, self.off.as_ref()) {
(ShortMonthName, Some(d), _, _) => {
w.write_str(short_months(self.locale)[d.month0() as usize])
}
(LongMonthName, Some(d), _, _) => {
w.write_str(long_months(self.locale)[d.month0() as usize])
}
(ShortWeekdayName, Some(d), _, _) => w.write_str(
short_weekdays(self.locale)[d.weekday().num_days_from_sunday() as usize],
),
(LongWeekdayName, Some(d), _, _) => {
w.write_str(long_weekdays(self.locale)[d.weekday().num_days_from_sunday() as usize])
}
(LowerAmPm, _, Some(t), _) => {
let ampm = if t.hour12().0 { am_pm(self.locale)[1] } else { am_pm(self.locale)[0] };
for c in ampm.chars().flat_map(|c| c.to_lowercase()) {
w.write_char(c)?
}
Ok(())
}
(UpperAmPm, _, Some(t), _) => {
let ampm = if t.hour12().0 { am_pm(self.locale)[1] } else { am_pm(self.locale)[0] };
w.write_str(ampm)
}
(Nanosecond, _, Some(t), _) => {
let nano = t.nanosecond() % 1_000_000_000;
if nano == 0 {
Ok(())
} else {
w.write_str(decimal_point(self.locale))?;
if nano % 1_000_000 == 0 {
write!(w, "{:03}", nano / 1_000_000)
} else if nano % 1_000 == 0 {
write!(w, "{:06}", nano / 1_000)
} else {
write!(w, "{nano:09}")
}
}
}
(Nanosecond3, _, Some(t), _) => {
w.write_str(decimal_point(self.locale))?;
write!(w, "{:03}", t.nanosecond() / 1_000_000 % 1000)
}
(Nanosecond6, _, Some(t), _) => {
w.write_str(decimal_point(self.locale))?;
write!(w, "{:06}", t.nanosecond() / 1_000 % 1_000_000)
}
(Nanosecond9, _, Some(t), _) => {
w.write_str(decimal_point(self.locale))?;
write!(w, "{:09}", t.nanosecond() % 1_000_000_000)
}
(Internal(InternalFixed { val: Nanosecond3NoDot }), _, Some(t), _) => {
write!(w, "{:03}", t.nanosecond() / 1_000_000 % 1_000)
}
(Internal(InternalFixed { val: Nanosecond6NoDot }), _, Some(t), _) => {
write!(w, "{:06}", t.nanosecond() / 1_000 % 1_000_000)
}
(Internal(InternalFixed { val: Nanosecond9NoDot }), _, Some(t), _) => {
write!(w, "{:09}", t.nanosecond() % 1_000_000_000)
}
(TimezoneName, _, _, Some((tz_name, _))) => write!(w, "{tz_name}"),
(TimezoneOffset | TimezoneOffsetZ, _, _, Some((_, off))) => {
let offset_format = OffsetFormat {
precision: OffsetPrecision::Minutes,
colons: Colons::Maybe,
allow_zulu: *spec == TimezoneOffsetZ,
padding: Pad::Zero,
};
offset_format.format(w, *off)
}
(TimezoneOffsetColon | TimezoneOffsetColonZ, _, _, Some((_, off))) => {
let offset_format = OffsetFormat {
precision: OffsetPrecision::Minutes,
colons: Colons::Colon,
allow_zulu: *spec == TimezoneOffsetColonZ,
padding: Pad::Zero,
};
offset_format.format(w, *off)
}
(TimezoneOffsetDoubleColon, _, _, Some((_, off))) => {
let offset_format = OffsetFormat {
precision: OffsetPrecision::Seconds,
colons: Colons::Colon,
allow_zulu: false,
padding: Pad::Zero,
};
offset_format.format(w, *off)
}
(TimezoneOffsetTripleColon, _, _, Some((_, off))) => {
let offset_format = OffsetFormat {
precision: OffsetPrecision::Hours,
colons: Colons::None,
allow_zulu: false,
padding: Pad::Zero,
};
offset_format.format(w, *off)
}
(RFC2822, Some(d), Some(t), Some((_, off))) => {
write_rfc2822(w, crate::NaiveDateTime::new(d, t), *off)
}
(RFC3339, Some(d), Some(t), Some((_, off))) => write_rfc3339(
w,
crate::NaiveDateTime::new(d, t),
*off,
SecondsFormat::AutoSi,
false,
),
_ => Err(fmt::Error), // insufficient arguments for given format
}
}
}
#[cfg(feature = "alloc")]
impl<'a, I: Iterator<Item = B> + Clone, B: Borrow<Item<'a>>> Display for DelayedFormat<I> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut result = String::new();
self.write_to(&mut result)?;
f.pad(&result)
}
}
/// Tries to format given arguments with given formatting items.
/// Internally used by `DelayedFormat`.
#[cfg(feature = "alloc")]
#[deprecated(since = "0.4.32", note = "Use DelayedFormat::fmt or DelayedFormat::write_to instead")]
pub fn format<'a, I, B>(
w: &mut fmt::Formatter,
date: Option<&NaiveDate>,
time: Option<&NaiveTime>,
off: Option<&(String, FixedOffset)>,
items: I,
) -> fmt::Result
where
I: Iterator<Item = B> + Clone,
B: Borrow<Item<'a>>,
{
DelayedFormat {
date: date.copied(),
time: time.copied(),
off: off.cloned(),
items,
locale: default_locale(),
}
.fmt(w)
}
/// Formats a single formatting item.
#[cfg(feature = "alloc")]
#[deprecated(since = "0.4.32", note = "Use DelayedFormat::fmt or DelayedFormat::write_to instead")]
pub fn format_item(
w: &mut fmt::Formatter,
date: Option<&NaiveDate>,
time: Option<&NaiveTime>,
off: Option<&(String, FixedOffset)>,
item: &Item<'_>,
) -> fmt::Result {
DelayedFormat {
date: date.copied(),
time: time.copied(),
off: off.cloned(),
items: [item].into_iter(),
locale: default_locale(),
}
.fmt(w)
}
#[cfg(any(feature = "alloc", feature = "serde"))]
impl OffsetFormat {
/// Writes an offset from UTC with the format defined by `self`.
fn format(&self, w: &mut impl Write, off: FixedOffset) -> fmt::Result {
let off = off.local_minus_utc();
if self.allow_zulu && off == 0 {
w.write_char('Z')?;
return Ok(());
}
let (sign, off) = if off < 0 { ('-', -off) } else { ('+', off) };
let hours;
let mut mins = 0;
let mut secs = 0;
let precision = match self.precision {
OffsetPrecision::Hours => {
// Minutes and seconds are simply truncated
hours = (off / 3600) as u8;
OffsetPrecision::Hours
}
OffsetPrecision::Minutes | OffsetPrecision::OptionalMinutes => {
// Round seconds to the nearest minute.
let minutes = (off + 30) / 60;
mins = (minutes % 60) as u8;
hours = (minutes / 60) as u8;
if self.precision == OffsetPrecision::OptionalMinutes && mins == 0 {
OffsetPrecision::Hours
} else {
OffsetPrecision::Minutes
}
}
OffsetPrecision::Seconds
| OffsetPrecision::OptionalSeconds
| OffsetPrecision::OptionalMinutesAndSeconds => {
let minutes = off / 60;
secs = (off % 60) as u8;
mins = (minutes % 60) as u8;
hours = (minutes / 60) as u8;
if self.precision != OffsetPrecision::Seconds && secs == 0 {
if self.precision == OffsetPrecision::OptionalMinutesAndSeconds && mins == 0 {
OffsetPrecision::Hours
} else {
OffsetPrecision::Minutes
}
} else {
OffsetPrecision::Seconds
}
}
};
let colons = self.colons == Colons::Colon;
if hours < 10 {
if self.padding == Pad::Space {
w.write_char(' ')?;
}
w.write_char(sign)?;
if self.padding == Pad::Zero {
w.write_char('0')?;
}
w.write_char((b'0' + hours) as char)?;
} else {
w.write_char(sign)?;
write_hundreds(w, hours)?;
}
if let OffsetPrecision::Minutes | OffsetPrecision::Seconds = precision {
if colons {
w.write_char(':')?;
}
write_hundreds(w, mins)?;
}
if let OffsetPrecision::Seconds = precision {
if colons {
w.write_char(':')?;
}
write_hundreds(w, secs)?;
}
Ok(())
}
}
/// Specific formatting options for seconds. This may be extended in the
/// future, so exhaustive matching in external code is not recommended.
///
/// See the `TimeZone::to_rfc3339_opts` function for usage.
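///
/// # Example
///
/// A short sketch of the public entry point (illustrative only):
///
/// ```
/// # #[cfg(feature = "alloc")] {
/// use chrono::{SecondsFormat, TimeZone, Timelike, Utc};
///
/// let dt = Utc
///     .with_ymd_and_hms(2018, 1, 26, 18, 30, 9)
///     .unwrap()
///     .with_nanosecond(453_829_000)
///     .unwrap();
///
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Millis, false), "2018-01-26T18:30:09.453+00:00");
/// assert_eq!(dt.to_rfc3339_opts(SecondsFormat::Secs, true), "2018-01-26T18:30:09Z");
/// # }
/// ```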
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[allow(clippy::manual_non_exhaustive)]
pub enum SecondsFormat {
/// Format whole seconds only, with no decimal point nor subseconds.
Secs,
/// Use fixed 3 subsecond digits. This corresponds to [Fixed::Nanosecond3].
Millis,
/// Use fixed 6 subsecond digits. This corresponds to [Fixed::Nanosecond6].
Micros,
/// Use fixed 9 subsecond digits. This corresponds to [Fixed::Nanosecond9].
Nanos,
/// Automatically select one of `Secs`, `Millis`, `Micros`, or `Nanos` to display all available
/// non-zero sub-second digits. This corresponds to [Fixed::Nanosecond].
AutoSi,
// Do not match against this.
#[doc(hidden)]
__NonExhaustive,
}
/// Writes the date, time and offset to the string. Same as `%Y-%m-%dT%H:%M:%S%.f%:z`.
#[inline]
#[cfg(any(feature = "alloc", feature = "serde"))]
pub(crate) fn write_rfc3339(
w: &mut impl Write,
dt: NaiveDateTime,
off: FixedOffset,
secform: SecondsFormat,
use_z: bool,
) -> fmt::Result {
let year = dt.date().year();
if (0..=9999).contains(&year) {
write_hundreds(w, (year / 100) as u8)?;
write_hundreds(w, (year % 100) as u8)?;
} else {
// ISO 8601 requires the explicit sign for out-of-range years
write!(w, "{year:+05}")?;
}
w.write_char('-')?;
write_hundreds(w, dt.date().month() as u8)?;
w.write_char('-')?;
write_hundreds(w, dt.date().day() as u8)?;
w.write_char('T')?;
let (hour, min, mut sec) = dt.time().hms();
let mut nano = dt.nanosecond();
if nano >= 1_000_000_000 {
sec += 1;
nano -= 1_000_000_000;
}
write_hundreds(w, hour as u8)?;
w.write_char(':')?;
write_hundreds(w, min as u8)?;
w.write_char(':')?;
let sec = sec;
write_hundreds(w, sec as u8)?;
match secform {
SecondsFormat::Secs => {}
SecondsFormat::Millis => write!(w, ".{:03}", nano / 1_000_000)?,
SecondsFormat::Micros => write!(w, ".{:06}", nano / 1000)?,
SecondsFormat::Nanos => write!(w, ".{nano:09}")?,
SecondsFormat::AutoSi => {
if nano == 0 {
} else if nano % 1_000_000 == 0 {
write!(w, ".{:03}", nano / 1_000_000)?
} else if nano % 1_000 == 0 {
write!(w, ".{:06}", nano / 1_000)?
} else {
write!(w, ".{nano:09}")?
}
}
SecondsFormat::__NonExhaustive => unreachable!(),
};
OffsetFormat {
precision: OffsetPrecision::Minutes,
colons: Colons::Colon,
allow_zulu: use_z,
padding: Pad::Zero,
}
.format(w, off)
}
#[cfg(feature = "alloc")]
/// Writes datetimes like `Tue, 1 Jul 2003 10:52:37 +0200`, same as `%a, %d %b %Y %H:%M:%S %z`.
pub(crate) fn write_rfc2822(
w: &mut impl Write,
dt: NaiveDateTime,
off: FixedOffset,
) -> fmt::Result {
let year = dt.year();
// RFC2822 is only defined on years 0 through 9999
if !(0..=9999).contains(&year) {
return Err(fmt::Error);
}
let english = default_locale();
w.write_str(short_weekdays(english)[dt.weekday().num_days_from_sunday() as usize])?;
w.write_str(", ")?;
let day = dt.day();
if day < 10 {
w.write_char((b'0' + day as u8) as char)?;
} else {
write_hundreds(w, day as u8)?;
}
w.write_char(' ')?;
w.write_str(short_months(english)[dt.month0() as usize])?;
w.write_char(' ')?;
write_hundreds(w, (year / 100) as u8)?;
write_hundreds(w, (year % 100) as u8)?;
w.write_char(' ')?;
let (hour, min, sec) = dt.time().hms();
write_hundreds(w, hour as u8)?;
w.write_char(':')?;
write_hundreds(w, min as u8)?;
w.write_char(':')?;
let sec = sec + dt.nanosecond() / 1_000_000_000;
write_hundreds(w, sec as u8)?;
w.write_char(' ')?;
OffsetFormat {
precision: OffsetPrecision::Minutes,
colons: Colons::None,
allow_zulu: false,
padding: Pad::Zero,
}
.format(w, off)
}
/// Equivalent to `{:02}` formatting for n < 100.
pub(crate) fn write_hundreds(w: &mut impl Write, n: u8) -> fmt::Result {
if n >= 100 {
return Err(fmt::Error);
}
let tens = b'0' + n / 10;
let ones = b'0' + n % 10;
w.write_char(tens as char)?;
w.write_char(ones as char)
}
#[cfg(test)]
#[cfg(feature = "alloc")]
mod tests {
use super::{Colons, OffsetFormat, OffsetPrecision, Pad};
use crate::FixedOffset;
#[cfg(feature = "alloc")]
use crate::{NaiveDate, NaiveTime, TimeZone, Timelike, Utc};
#[cfg(feature = "alloc")]
#[test]
fn test_delayed_write_to() {
let dt = crate::DateTime::from_timestamp(1643723400, 123456789).unwrap();
let df = dt.format("%Y-%m-%d %H:%M:%S%.9f");
let mut dt_str = String::new();
df.write_to(&mut dt_str).unwrap();
assert_eq!(dt_str, "2022-02-01 13:50:00.123456789");
}
#[cfg(all(feature = "std", feature = "unstable-locales", feature = "alloc"))]
#[test]
fn test_with_locale_delayed_write_to() {
use crate::DateTime;
use crate::format::locales::Locale;
let dt = DateTime::from_timestamp(1643723400, 123456789).unwrap();
let df = dt.format_localized("%A, %B %d, %Y", Locale::ja_JP);
let mut dt_str = String::new();
df.write_to(&mut dt_str).unwrap();
assert_eq!(dt_str, "火曜日, 2月 01, 2022");
}
#[test]
#[cfg(feature = "alloc")]
fn test_date_format() {
let d = NaiveDate::from_ymd_opt(2012, 3, 4).unwrap();
assert_eq!(d.format("%Y,%C,%y,%G,%g").to_string(), "2012,20,12,2012,12");
assert_eq!(d.format("%m,%b,%h,%B").to_string(), "03,Mar,Mar,March");
assert_eq!(d.format("%q").to_string(), "1");
assert_eq!(d.format("%d,%e").to_string(), "04, 4");
assert_eq!(d.format("%U,%W,%V").to_string(), "10,09,09");
assert_eq!(d.format("%a,%A,%w,%u").to_string(), "Sun,Sunday,0,7");
assert_eq!(d.format("%j").to_string(), "064"); // since 2012 is a leap year
assert_eq!(d.format("%D,%x").to_string(), "03/04/12,03/04/12");
assert_eq!(d.format("%F").to_string(), "2012-03-04");
assert_eq!(d.format("%v").to_string(), " 4-Mar-2012");
assert_eq!(d.format("%t%n%%%n%t").to_string(), "\t\n%\n\t");
// non-four-digit years
assert_eq!(
NaiveDate::from_ymd_opt(12345, 1, 1).unwrap().format("%Y").to_string(),
"+12345"
);
assert_eq!(NaiveDate::from_ymd_opt(1234, 1, 1).unwrap().format("%Y").to_string(), "1234");
assert_eq!(NaiveDate::from_ymd_opt(123, 1, 1).unwrap().format("%Y").to_string(), "0123");
assert_eq!(NaiveDate::from_ymd_opt(12, 1, 1).unwrap().format("%Y").to_string(), "0012");
assert_eq!(NaiveDate::from_ymd_opt(1, 1, 1).unwrap().format("%Y").to_string(), "0001");
assert_eq!(NaiveDate::from_ymd_opt(0, 1, 1).unwrap().format("%Y").to_string(), "0000");
assert_eq!(NaiveDate::from_ymd_opt(-1, 1, 1).unwrap().format("%Y").to_string(), "-0001");
assert_eq!(NaiveDate::from_ymd_opt(-12, 1, 1).unwrap().format("%Y").to_string(), "-0012");
assert_eq!(NaiveDate::from_ymd_opt(-123, 1, 1).unwrap().format("%Y").to_string(), "-0123");
assert_eq!(NaiveDate::from_ymd_opt(-1234, 1, 1).unwrap().format("%Y").to_string(), "-1234");
assert_eq!(
NaiveDate::from_ymd_opt(-12345, 1, 1).unwrap().format("%Y").to_string(),
"-12345"
);
// corner cases
assert_eq!(
NaiveDate::from_ymd_opt(2007, 12, 31).unwrap().format("%G,%g,%U,%W,%V").to_string(),
"2008,08,52,53,01"
);
assert_eq!(
NaiveDate::from_ymd_opt(2010, 1, 3).unwrap().format("%G,%g,%U,%W,%V").to_string(),
"2009,09,01,00,53"
);
}
#[test]
#[cfg(feature = "alloc")]
fn test_time_format() {
let t = NaiveTime::from_hms_nano_opt(3, 5, 7, 98765432).unwrap();
assert_eq!(t.format("%H,%k,%I,%l,%P,%p").to_string(), "03, 3,03, 3,am,AM");
assert_eq!(t.format("%M").to_string(), "05");
assert_eq!(t.format("%S,%f,%.f").to_string(), "07,098765432,.098765432");
assert_eq!(t.format("%.3f,%.6f,%.9f").to_string(), ".098,.098765,.098765432");
assert_eq!(t.format("%R").to_string(), "03:05");
assert_eq!(t.format("%T,%X").to_string(), "03:05:07,03:05:07");
assert_eq!(t.format("%r").to_string(), "03:05:07 AM");
assert_eq!(t.format("%t%n%%%n%t").to_string(), "\t\n%\n\t");
let t = NaiveTime::from_hms_micro_opt(3, 5, 7, 432100).unwrap();
assert_eq!(t.format("%S,%f,%.f").to_string(), "07,432100000,.432100");
assert_eq!(t.format("%.3f,%.6f,%.9f").to_string(), ".432,.432100,.432100000");
let t = NaiveTime::from_hms_milli_opt(3, 5, 7, 210).unwrap();
assert_eq!(t.format("%S,%f,%.f").to_string(), "07,210000000,.210");
assert_eq!(t.format("%.3f,%.6f,%.9f").to_string(), ".210,.210000,.210000000");
let t = NaiveTime::from_hms_opt(3, 5, 7).unwrap();
assert_eq!(t.format("%S,%f,%.f").to_string(), "07,000000000,");
assert_eq!(t.format("%.3f,%.6f,%.9f").to_string(), ".000,.000000,.000000000");
// corner cases
assert_eq!(
NaiveTime::from_hms_opt(13, 57, 9).unwrap().format("%r").to_string(),
"01:57:09 PM"
);
assert_eq!(
NaiveTime::from_hms_milli_opt(23, 59, 59, 1_000).unwrap().format("%X").to_string(),
"23:59:60"
);
}
#[test]
#[cfg(feature = "alloc")]
fn test_datetime_format() {
let dt =
NaiveDate::from_ymd_opt(2010, 9, 8).unwrap().and_hms_milli_opt(7, 6, 54, 321).unwrap();
assert_eq!(dt.format("%c").to_string(), "Wed Sep 8 07:06:54 2010");
assert_eq!(dt.format("%s").to_string(), "1283929614");
assert_eq!(dt.format("%t%n%%%n%t").to_string(), "\t\n%\n\t");
// a horror of leap second: coming near to you.
let dt = NaiveDate::from_ymd_opt(2012, 6, 30)
.unwrap()
.and_hms_milli_opt(23, 59, 59, 1_000)
.unwrap();
assert_eq!(dt.format("%c").to_string(), "Sat Jun 30 23:59:60 2012");
assert_eq!(dt.format("%s").to_string(), "1341100799"); // not 1341100800, it's intentional.
}
#[test]
#[cfg(feature = "alloc")]
fn test_datetime_format_alignment() {
let datetime = Utc
.with_ymd_and_hms(2007, 1, 2, 12, 34, 56)
.unwrap()
.with_nanosecond(123456789)
.unwrap();
// Item::Literal, odd number of padding bytes.
let percent = datetime.format("%%");
assert_eq!(" %", format!("{percent:>4}"));
assert_eq!("% ", format!("{percent:<4}"));
assert_eq!(" % ", format!("{percent:^4}"));
// Item::Numeric, custom non-ASCII padding character
let year = datetime.format("%Y");
assert_eq!("——2007", format!("{year:—>6}"));
assert_eq!("2007——", format!("{year:—<6}"));
assert_eq!("—2007—", format!("{year:—^6}"));
// Item::Fixed
let tz = datetime.format("%Z");
assert_eq!(" UTC", format!("{tz:>5}"));
assert_eq!("UTC ", format!("{tz:<5}"));
assert_eq!(" UTC ", format!("{tz:^5}"));
// [Item::Numeric, Item::Space, Item::Literal, Item::Space, Item::Numeric]
let ymd = datetime.format("%Y %B %d");
assert_eq!(" 2007 January 02", format!("{ymd:>17}"));
assert_eq!("2007 January 02 ", format!("{ymd:<17}"));
assert_eq!(" 2007 January 02 ", format!("{ymd:^17}"));
// Truncated
let time = datetime.format("%T%.6f");
assert_eq!("12:34:56.1234", format!("{time:.13}"));
}
#[test]
fn test_offset_formatting() {
fn check_all(precision: OffsetPrecision, expected: [[&str; 7]; 12]) {
fn check(
precision: OffsetPrecision,
colons: Colons,
padding: Pad,
allow_zulu: bool,
offsets: [FixedOffset; 7],
expected: [&str; 7],
) {
let offset_format = OffsetFormat { precision, colons, allow_zulu, padding };
for (offset, expected) in offsets.iter().zip(expected.iter()) {
let mut output = String::new();
offset_format.format(&mut output, *offset).unwrap();
assert_eq!(&output, expected);
}
}
// +03:45, -03:30, +11:00, -11:00:22, +02:34:26, -12:34:30, +00:00
let offsets = [
FixedOffset::east_opt(13_500).unwrap(),
FixedOffset::east_opt(-12_600).unwrap(),
FixedOffset::east_opt(39_600).unwrap(),
FixedOffset::east_opt(-39_622).unwrap(),
FixedOffset::east_opt(9266).unwrap(),
FixedOffset::east_opt(-45270).unwrap(),
FixedOffset::east_opt(0).unwrap(),
];
check(precision, Colons::Colon, Pad::Zero, false, offsets, expected[0]);
check(precision, Colons::Colon, Pad::Zero, true, offsets, expected[1]);
check(precision, Colons::Colon, Pad::Space, false, offsets, expected[2]);
check(precision, Colons::Colon, Pad::Space, true, offsets, expected[3]);
check(precision, Colons::Colon, Pad::None, false, offsets, expected[4]);
check(precision, Colons::Colon, Pad::None, true, offsets, expected[5]);
check(precision, Colons::None, Pad::Zero, false, offsets, expected[6]);
check(precision, Colons::None, Pad::Zero, true, offsets, expected[7]);
check(precision, Colons::None, Pad::Space, false, offsets, expected[8]);
check(precision, Colons::None, Pad::Space, true, offsets, expected[9]);
check(precision, Colons::None, Pad::None, false, offsets, expected[10]);
check(precision, Colons::None, Pad::None, true, offsets, expected[11]);
// `Colons::Maybe` should format the same as `Colons::None`
check(precision, Colons::Maybe, Pad::Zero, false, offsets, expected[6]);
check(precision, Colons::Maybe, Pad::Zero, true, offsets, expected[7]);
check(precision, Colons::Maybe, Pad::Space, false, offsets, expected[8]);
check(precision, Colons::Maybe, Pad::Space, true, offsets, expected[9]);
check(precision, Colons::Maybe, Pad::None, false, offsets, expected[10]);
check(precision, Colons::Maybe, Pad::None, true, offsets, expected[11]);
}
check_all(
OffsetPrecision::Hours,
[
["+03", "-03", "+11", "-11", "+02", "-12", "+00"],
["+03", "-03", "+11", "-11", "+02", "-12", "Z"],
[" +3", " -3", "+11", "-11", " +2", "-12", " +0"],
[" +3", " -3", "+11", "-11", " +2", "-12", "Z"],
["+3", "-3", "+11", "-11", "+2", "-12", "+0"],
["+3", "-3", "+11", "-11", "+2", "-12", "Z"],
["+03", "-03", "+11", "-11", "+02", "-12", "+00"],
["+03", "-03", "+11", "-11", "+02", "-12", "Z"],
[" +3", " -3", "+11", "-11", " +2", "-12", " +0"],
[" +3", " -3", "+11", "-11", " +2", "-12", "Z"],
["+3", "-3", "+11", "-11", "+2", "-12", "+0"],
["+3", "-3", "+11", "-11", "+2", "-12", "Z"],
],
);
check_all(
OffsetPrecision::Minutes,
[
["+03:45", "-03:30", "+11:00", "-11:00", "+02:34", "-12:35", "+00:00"],
["+03:45", "-03:30", "+11:00", "-11:00", "+02:34", "-12:35", "Z"],
[" +3:45", " -3:30", "+11:00", "-11:00", " +2:34", "-12:35", " +0:00"],
[" +3:45", " -3:30", "+11:00", "-11:00", " +2:34", "-12:35", "Z"],
["+3:45", "-3:30", "+11:00", "-11:00", "+2:34", "-12:35", "+0:00"],
["+3:45", "-3:30", "+11:00", "-11:00", "+2:34", "-12:35", "Z"],
["+0345", "-0330", "+1100", "-1100", "+0234", "-1235", "+0000"],
["+0345", "-0330", "+1100", "-1100", "+0234", "-1235", "Z"],
[" +345", " -330", "+1100", "-1100", " +234", "-1235", " +000"],
[" +345", " -330", "+1100", "-1100", " +234", "-1235", "Z"],
["+345", "-330", "+1100", "-1100", "+234", "-1235", "+000"],
["+345", "-330", "+1100", "-1100", "+234", "-1235", "Z"],
],
);
#[rustfmt::skip]
check_all(
OffsetPrecision::Seconds,
[
["+03:45:00", "-03:30:00", "+11:00:00", "-11:00:22", "+02:34:26", "-12:34:30", "+00:00:00"],
["+03:45:00", "-03:30:00", "+11:00:00", "-11:00:22", "+02:34:26", "-12:34:30", "Z"],
[" +3:45:00", " -3:30:00", "+11:00:00", "-11:00:22", " +2:34:26", "-12:34:30", " +0:00:00"],
[" +3:45:00", " -3:30:00", "+11:00:00", "-11:00:22", " +2:34:26", "-12:34:30", "Z"],
["+3:45:00", "-3:30:00", "+11:00:00", "-11:00:22", "+2:34:26", "-12:34:30", "+0:00:00"],
["+3:45:00", "-3:30:00", "+11:00:00", "-11:00:22", "+2:34:26", "-12:34:30", "Z"],
["+034500", "-033000", "+110000", "-110022", "+023426", "-123430", "+000000"],
["+034500", "-033000", "+110000", "-110022", "+023426", "-123430", "Z"],
[" +34500", " -33000", "+110000", "-110022", " +23426", "-123430", " +00000"],
[" +34500", " -33000", "+110000", "-110022", " +23426", "-123430", "Z"],
["+34500", "-33000", "+110000", "-110022", "+23426", "-123430", "+00000"],
["+34500", "-33000", "+110000", "-110022", "+23426", "-123430", "Z"],
],
);
check_all(
OffsetPrecision::OptionalMinutes,
[
["+03:45", "-03:30", "+11", "-11", "+02:34", "-12:35", "+00"],
["+03:45", "-03:30", "+11", "-11", "+02:34", "-12:35", "Z"],
[" +3:45", " -3:30", "+11", "-11", " +2:34", "-12:35", " +0"],
[" +3:45", " -3:30", "+11", "-11", " +2:34", "-12:35", "Z"],
["+3:45", "-3:30", "+11", "-11", "+2:34", "-12:35", "+0"],
["+3:45", "-3:30", "+11", "-11", "+2:34", "-12:35", "Z"],
["+0345", "-0330", "+11", "-11", "+0234", "-1235", "+00"],
["+0345", "-0330", "+11", "-11", "+0234", "-1235", "Z"],
[" +345", " -330", "+11", "-11", " +234", "-1235", " +0"],
[" +345", " -330", "+11", "-11", " +234", "-1235", "Z"],
["+345", "-330", "+11", "-11", "+234", "-1235", "+0"],
["+345", "-330", "+11", "-11", "+234", "-1235", "Z"],
],
);
check_all(
OffsetPrecision::OptionalSeconds,
[
["+03:45", "-03:30", "+11:00", "-11:00:22", "+02:34:26", "-12:34:30", "+00:00"],
["+03:45", "-03:30", "+11:00", "-11:00:22", "+02:34:26", "-12:34:30", "Z"],
[" +3:45", " -3:30", "+11:00", "-11:00:22", " +2:34:26", "-12:34:30", " +0:00"],
[" +3:45", " -3:30", "+11:00", "-11:00:22", " +2:34:26", "-12:34:30", "Z"],
["+3:45", "-3:30", "+11:00", "-11:00:22", "+2:34:26", "-12:34:30", "+0:00"],
["+3:45", "-3:30", "+11:00", "-11:00:22", "+2:34:26", "-12:34:30", "Z"],
["+0345", "-0330", "+1100", "-110022", "+023426", "-123430", "+0000"],
["+0345", "-0330", "+1100", "-110022", "+023426", "-123430", "Z"],
[" +345", " -330", "+1100", "-110022", " +23426", "-123430", " +000"],
[" +345", " -330", "+1100", "-110022", " +23426", "-123430", "Z"],
["+345", "-330", "+1100", "-110022", "+23426", "-123430", "+000"],
["+345", "-330", "+1100", "-110022", "+23426", "-123430", "Z"],
],
);
check_all(
OffsetPrecision::OptionalMinutesAndSeconds,
[
["+03:45", "-03:30", "+11", "-11:00:22", "+02:34:26", "-12:34:30", "+00"],
["+03:45", "-03:30", "+11", "-11:00:22", "+02:34:26", "-12:34:30", "Z"],
[" +3:45", " -3:30", "+11", "-11:00:22", " +2:34:26", "-12:34:30", " +0"],
[" +3:45", " -3:30", "+11", "-11:00:22", " +2:34:26", "-12:34:30", "Z"],
["+3:45", "-3:30", "+11", "-11:00:22", "+2:34:26", "-12:34:30", "+0"],
["+3:45", "-3:30", "+11", "-11:00:22", "+2:34:26", "-12:34:30", "Z"],
["+0345", "-0330", "+11", "-110022", "+023426", "-123430", "+00"],
["+0345", "-0330", "+11", "-110022", "+023426", "-123430", "Z"],
[" +345", " -330", "+11", "-110022", " +23426", "-123430", " +0"],
[" +345", " -330", "+11", "-110022", " +23426", "-123430", "Z"],
["+345", "-330", "+11", "-110022", "+23426", "-123430", "+0"],
["+345", "-330", "+11", "-110022", "+23426", "-123430", "Z"],
],
);
}
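    // Illustrative addition (not part of the upstream test suite): with
    // `OffsetPrecision::Minutes` the seconds component is rounded to the nearest minute.
    #[test]
    fn test_offset_minutes_rounding() {
        let offset_format = OffsetFormat {
            precision: OffsetPrecision::Minutes,
            colons: Colons::Colon,
            allow_zulu: false,
            padding: Pad::Zero,
        };
        // +1 h 0 min 30 s rounds up to +01:01.
        let mut output = String::new();
        offset_format.format(&mut output, FixedOffset::east_opt(3630).unwrap()).unwrap();
        assert_eq!(output, "+01:01");
        // +1 h 0 min 29 s rounds down to +01:00.
        output.clear();
        offset_format.format(&mut output, FixedOffset::east_opt(3629).unwrap()).unwrap();
        assert_eq!(output, "+01:00");
    }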
}

View File

@@ -0,0 +1,103 @@
#[cfg(feature = "unstable-locales")]
mod localized {
use pure_rust_locales::{Locale, locale_match};
pub(crate) const fn default_locale() -> Locale {
Locale::POSIX
}
pub(crate) const fn short_months(locale: Locale) -> &'static [&'static str] {
locale_match!(locale => LC_TIME::ABMON)
}
pub(crate) const fn long_months(locale: Locale) -> &'static [&'static str] {
locale_match!(locale => LC_TIME::MON)
}
pub(crate) const fn short_weekdays(locale: Locale) -> &'static [&'static str] {
locale_match!(locale => LC_TIME::ABDAY)
}
pub(crate) const fn long_weekdays(locale: Locale) -> &'static [&'static str] {
locale_match!(locale => LC_TIME::DAY)
}
pub(crate) const fn am_pm(locale: Locale) -> &'static [&'static str] {
locale_match!(locale => LC_TIME::AM_PM)
}
pub(crate) const fn decimal_point(locale: Locale) -> &'static str {
locale_match!(locale => LC_NUMERIC::DECIMAL_POINT)
}
pub(crate) const fn d_fmt(locale: Locale) -> &'static str {
locale_match!(locale => LC_TIME::D_FMT)
}
pub(crate) const fn d_t_fmt(locale: Locale) -> &'static str {
locale_match!(locale => LC_TIME::D_T_FMT)
}
pub(crate) const fn t_fmt(locale: Locale) -> &'static str {
locale_match!(locale => LC_TIME::T_FMT)
}
pub(crate) const fn t_fmt_ampm(locale: Locale) -> &'static str {
locale_match!(locale => LC_TIME::T_FMT_AMPM)
}
}
#[cfg(feature = "unstable-locales")]
pub(crate) use localized::*;
#[cfg(feature = "unstable-locales")]
pub use pure_rust_locales::Locale;
#[cfg(not(feature = "unstable-locales"))]
mod unlocalized {
#[derive(Copy, Clone, Debug)]
pub(crate) struct Locale;
pub(crate) const fn default_locale() -> Locale {
Locale
}
pub(crate) const fn short_months(_locale: Locale) -> &'static [&'static str] {
&["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
}
pub(crate) const fn long_months(_locale: Locale) -> &'static [&'static str] {
&[
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December",
]
}
pub(crate) const fn short_weekdays(_locale: Locale) -> &'static [&'static str] {
&["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]
}
pub(crate) const fn long_weekdays(_locale: Locale) -> &'static [&'static str] {
&["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]
}
pub(crate) const fn am_pm(_locale: Locale) -> &'static [&'static str] {
&["AM", "PM"]
}
pub(crate) const fn decimal_point(_locale: Locale) -> &'static str {
"."
}
}
#[cfg(not(feature = "unstable-locales"))]
pub(crate) use unlocalized::*;

View File

@@ -0,0 +1,557 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! Formatting (and parsing) utilities for date and time.
//!
//! This module provides the common types and routines to implement,
//! for example, [`DateTime::format`](../struct.DateTime.html#method.format) or
//! [`DateTime::parse_from_str`](../struct.DateTime.html#method.parse_from_str) methods.
//! For most cases you should use these high-level interfaces.
//!
//! Internally the formatting and parsing shares the same abstract **formatting items**,
//! which are just an [`Iterator`](https://doc.rust-lang.org/std/iter/trait.Iterator.html) of
//! the [`Item`](./enum.Item.html) type.
//! They are generated from more readable **format strings**;
//! currently Chrono supports a built-in syntax closely resembling
//! C's `strftime` format. The available options can be found [here](./strftime/index.html).
//!
//! # Example
//! ```
//! # #[cfg(feature = "alloc")] {
//! use chrono::{NaiveDateTime, TimeZone, Utc};
//!
//! let date_time = Utc.with_ymd_and_hms(2020, 11, 10, 0, 1, 32).unwrap();
//!
//! let formatted = format!("{}", date_time.format("%Y-%m-%d %H:%M:%S"));
//! assert_eq!(formatted, "2020-11-10 00:01:32");
//!
//! let parsed = NaiveDateTime::parse_from_str(&formatted, "%Y-%m-%d %H:%M:%S")?.and_utc();
//! assert_eq!(parsed, date_time);
//! # }
//! # Ok::<(), chrono::ParseError>(())
//! ```
#[cfg(all(feature = "alloc", not(feature = "std"), not(test)))]
use alloc::boxed::Box;
use core::fmt;
use core::str::FromStr;
#[cfg(feature = "std")]
use std::error::Error;
use crate::{Month, ParseMonthError, ParseWeekdayError, Weekday};
mod formatting;
mod parsed;
// due to the size of parsing routines, they are in separate modules.
mod parse;
pub(crate) mod scan;
pub mod strftime;
#[allow(unused)]
// TODO: remove '#[allow(unused)]' once we use this module for parsing or something else that does
// not require `alloc`.
pub(crate) mod locales;
pub use formatting::SecondsFormat;
pub(crate) use formatting::write_hundreds;
#[cfg(feature = "alloc")]
pub(crate) use formatting::write_rfc2822;
#[cfg(any(feature = "alloc", feature = "serde"))]
pub(crate) use formatting::write_rfc3339;
#[cfg(feature = "alloc")]
#[allow(deprecated)]
pub use formatting::{DelayedFormat, format, format_item};
#[cfg(feature = "unstable-locales")]
pub use locales::Locale;
pub(crate) use parse::parse_rfc3339;
pub use parse::{parse, parse_and_remainder};
pub use parsed::Parsed;
pub use strftime::StrftimeItems;
/// An uninhabited type used for `InternalNumeric` and `InternalFixed` below.
#[derive(Clone, PartialEq, Eq, Hash)]
enum Void {}
/// Padding characters for numeric items.
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
pub enum Pad {
/// No padding.
None,
/// Zero (`0`) padding.
Zero,
/// Space padding.
Space,
}
/// Numeric item types.
/// They have associated formatting width (FW) and parsing width (PW).
///
/// The **formatting width** is the minimal width to be formatted.
/// If the number is too short, and the padding is not [`Pad::None`](./enum.Pad.html#variant.None),
/// then it is left-padded.
/// If the number is too long or (in some cases) negative, it is printed as is.
///
/// The **parsing width** is the maximal width to be scanned.
/// The parser only tries to consume from one up to the given number of digits (greedily).
/// It also trims the preceding whitespace, if any.
/// It cannot parse a negative number, so some dates and times cannot be formatted and then
/// parsed back with the same formatting items.
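///
/// # Example
///
/// A small sketch of how padding shows up through the `strftime` syntax, where `%d`, `%e` and
/// `%-d` all format the day of the month (`Day`) with `Pad::Zero`, `Pad::Space` and `Pad::None`
/// respectively (illustrative only):
///
/// ```
/// # #[cfg(feature = "alloc")] {
/// use chrono::NaiveDate;
///
/// let d = NaiveDate::from_ymd_opt(2015, 9, 5).unwrap();
/// assert_eq!(d.format("%d").to_string(), "05"); // zero padding
/// assert_eq!(d.format("%e").to_string(), " 5"); // space padding
/// assert_eq!(d.format("%-d").to_string(), "5"); // no padding
/// # }
/// ```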
#[non_exhaustive]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum Numeric {
/// Full Gregorian year (FW=4, PW=∞).
/// May accept years before 1 BCE or after 9999 CE, given an initial sign (+/-).
Year,
/// Gregorian year divided by 100 (century number; FW=PW=2). Implies the non-negative year.
YearDiv100,
/// Gregorian year modulo 100 (FW=PW=2). Cannot be negative.
YearMod100,
/// Year in the ISO week date (FW=4, PW=∞).
/// May accept years before 1 BCE or after 9999 CE, given an initial sign.
IsoYear,
/// Year in the ISO week date, divided by 100 (FW=PW=2). Implies the non-negative year.
IsoYearDiv100,
/// Year in the ISO week date, modulo 100 (FW=PW=2). Cannot be negative.
IsoYearMod100,
/// Quarter (FW=PW=1).
Quarter,
/// Month (FW=PW=2).
Month,
/// Day of the month (FW=PW=2).
Day,
/// Week number, where the week 1 starts at the first Sunday of January (FW=PW=2).
WeekFromSun,
/// Week number, where the week 1 starts at the first Monday of January (FW=PW=2).
WeekFromMon,
/// Week number in the ISO week date (FW=PW=2).
IsoWeek,
/// Day of the week, where Sunday = 0 and Saturday = 6 (FW=PW=1).
NumDaysFromSun,
/// Day of the week, where Monday = 1 and Sunday = 7 (FW=PW=1).
WeekdayFromMon,
/// Day of the year (FW=PW=3).
Ordinal,
/// Hour number in the 24-hour clocks (FW=PW=2).
Hour,
/// Hour number in the 12-hour clocks (FW=PW=2).
Hour12,
/// The number of minutes since the last whole hour (FW=PW=2).
Minute,
/// The number of seconds since the last whole minute (FW=PW=2).
Second,
/// The number of nanoseconds since the last whole second (FW=PW=9).
/// Note that this is *not* left-aligned;
/// see also [`Fixed::Nanosecond`](./enum.Fixed.html#variant.Nanosecond).
Nanosecond,
/// The number of non-leap seconds since the midnight UTC on January 1, 1970 (FW=1, PW=∞).
/// For formatting, it assumes UTC upon the absence of time zone offset.
Timestamp,
/// Internal uses only.
///
/// This item exists so that one can add additional internal-only formatting
/// without breaking major compatibility (as enum variants cannot be selectively private).
Internal(InternalNumeric),
}
/// An opaque type representing numeric item types for internal uses only.
#[derive(Clone, Eq, Hash, PartialEq)]
pub struct InternalNumeric {
_dummy: Void,
}
impl fmt::Debug for InternalNumeric {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "<InternalNumeric>")
}
}
/// Fixed-format item types.
///
/// They have their own rules of formatting and parsing.
/// Unless otherwise noted, they print in the specified cases but parse case-insensitively.
#[non_exhaustive]
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum Fixed {
/// Abbreviated month names.
///
/// Prints a three-letter-long name in the title case, reads the same name in any case.
ShortMonthName,
/// Full month names.
///
/// Prints a full name in the title case, reads either a short or full name in any case.
LongMonthName,
/// Abbreviated day of the week names.
///
/// Prints a three-letter-long name in the title case, reads the same name in any case.
ShortWeekdayName,
/// Full day of the week names.
///
/// Prints a full name in the title case, reads either a short or full name in any case.
LongWeekdayName,
/// AM/PM.
///
/// Prints in lower case, reads in any case.
LowerAmPm,
/// AM/PM.
///
/// Prints in upper case, reads in any case.
UpperAmPm,
/// An optional dot plus one or more digits for left-aligned nanoseconds.
/// May print nothing, 3, 6 or 9 digits according to the available accuracy.
/// See also [`Numeric::Nanosecond`](./enum.Numeric.html#variant.Nanosecond).
Nanosecond,
/// Same as [`Nanosecond`](#variant.Nanosecond) but the accuracy is fixed to 3.
Nanosecond3,
/// Same as [`Nanosecond`](#variant.Nanosecond) but the accuracy is fixed to 6.
Nanosecond6,
/// Same as [`Nanosecond`](#variant.Nanosecond) but the accuracy is fixed to 9.
Nanosecond9,
/// Timezone name.
///
/// It does not support parsing; its use in the parser is an immediate failure.
TimezoneName,
/// Offset from the local time to UTC (`+09:00` or `-04:00` or `+00:00`).
///
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespace.
/// The offset is limited from `-24:00` to `+24:00`,
/// which is the same as [`FixedOffset`](../offset/struct.FixedOffset.html)'s range.
TimezoneOffsetColon,
/// Offset from the local time to UTC with seconds (`+09:00:00` or `-04:00:00` or `+00:00:00`).
///
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespace.
/// The offset is limited from `-24:00:00` to `+24:00:00`,
/// which is the same as [`FixedOffset`](../offset/struct.FixedOffset.html)'s range.
TimezoneOffsetDoubleColon,
/// Offset from the local time to UTC without minutes (`+09` or `-04` or `+00`).
///
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespace.
/// The offset is limited from `-24` to `+24`,
/// which is the same as [`FixedOffset`](../offset/struct.FixedOffset.html)'s range.
TimezoneOffsetTripleColon,
/// Offset from the local time to UTC (`+09:00` or `-04:00` or `Z`).
///
/// In the parser, the colon can be omitted and/or surrounded with any amount of whitespace,
/// and `Z` can be either in upper case or in lower case.
/// The offset is limited from `-24:00` to `+24:00`,
/// which is the same as [`FixedOffset`](../offset/struct.FixedOffset.html)'s range.
TimezoneOffsetColonZ,
/// Same as [`TimezoneOffsetColon`](#variant.TimezoneOffsetColon) but prints no colon.
/// Parsing allows an optional colon.
TimezoneOffset,
/// Same as [`TimezoneOffsetColonZ`](#variant.TimezoneOffsetColonZ) but prints no colon.
/// Parsing allows an optional colon.
TimezoneOffsetZ,
/// RFC 2822 date and time syntax. Commonly used for email and MIME date and time.
RFC2822,
/// RFC 3339 & ISO 8601 date and time syntax.
RFC3339,
/// Internal uses only.
///
/// This item exists so that one can add additional internal-only formatting
/// without breaking major compatibility (as enum variants cannot be selectively private).
Internal(InternalFixed),
}
/// An opaque type representing fixed-format item types for internal uses only.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct InternalFixed {
val: InternalInternal,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum InternalInternal {
/// Same as [`TimezoneOffsetColonZ`](#variant.TimezoneOffsetColonZ), but
/// allows missing minutes (per [ISO 8601][iso8601]).
///
/// # Panics
///
/// If you try to use this for printing.
///
/// [iso8601]: https://en.wikipedia.org/wiki/ISO_8601#Time_offsets_from_UTC
TimezoneOffsetPermissive,
/// Same as [`Nanosecond`](#variant.Nanosecond) but the accuracy is fixed to 3 and there is no leading dot.
Nanosecond3NoDot,
/// Same as [`Nanosecond`](#variant.Nanosecond) but the accuracy is fixed to 6 and there is no leading dot.
Nanosecond6NoDot,
/// Same as [`Nanosecond`](#variant.Nanosecond) but the accuracy is fixed to 9 and there is no leading dot.
Nanosecond9NoDot,
}
/// Type for specifying the format of UTC offsets.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct OffsetFormat {
/// See `OffsetPrecision`.
pub precision: OffsetPrecision,
/// Separator between hours, minutes and seconds.
pub colons: Colons,
/// Represent `+00:00` as `Z`.
pub allow_zulu: bool,
/// Pad the hour value to two digits.
pub padding: Pad,
}
/// The precision of an offset from UTC formatting item.
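///
/// # Example
///
/// A small sketch of how the precision variants surface through the `%z` family of `strftime`
/// specifiers (illustrative only):
///
/// ```
/// # #[cfg(feature = "alloc")] {
/// use chrono::DateTime;
///
/// let dt = DateTime::parse_from_rfc3339("2023-04-05T06:07:08+05:30").unwrap();
/// assert_eq!(dt.format("%z").to_string(), "+0530"); // minutes, no colon
/// assert_eq!(dt.format("%:z").to_string(), "+05:30"); // minutes, with colon
/// assert_eq!(dt.format("%::z").to_string(), "+05:30:00"); // seconds
/// assert_eq!(dt.format("%:::z").to_string(), "+05"); // hours only
/// # }
/// ```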
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum OffsetPrecision {
/// Format offset from UTC as hours only. Not recommended; it is not uncommon for timezones to
/// have an offset of 30 minutes, 15 minutes, etc.
/// Any minutes and seconds get truncated.
Hours,
/// Format offset from UTC as hours and minutes.
/// Any seconds will be rounded to the nearest minute.
Minutes,
/// Format offset from UTC as hours, minutes and seconds.
Seconds,
/// Format offset from UTC as hours, and optionally with minutes.
/// Any seconds will be rounded to the nearest minute.
OptionalMinutes,
/// Format offset from UTC as hours and minutes, and optionally seconds.
OptionalSeconds,
/// Format offset from UTC as hours and optionally minutes and seconds.
OptionalMinutesAndSeconds,
}
/// The separator between hours and minutes in an offset.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Colons {
/// No separator
None,
/// Colon (`:`) as separator
Colon,
/// No separator when formatting, colon allowed when parsing.
Maybe,
}
/// A single formatting item. This is used for both formatting and parsing.
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub enum Item<'a> {
/// A literally printed and parsed text.
Literal(&'a str),
/// Same as `Literal` but with the string owned by the item.
#[cfg(feature = "alloc")]
OwnedLiteral(Box<str>),
/// Whitespace. Prints literally but reads zero or more whitespace.
Space(&'a str),
/// Same as `Space` but with the string owned by the item.
#[cfg(feature = "alloc")]
OwnedSpace(Box<str>),
/// Numeric item. Can be optionally padded to the maximal length (if any) when formatting;
/// the parser simply ignores any padded whitespace and zeroes.
Numeric(Numeric, Pad),
/// Fixed-format item.
Fixed(Fixed),
/// Issues a formatting error. Used to signal an invalid format string.
Error,
}
const fn num(numeric: Numeric) -> Item<'static> {
Item::Numeric(numeric, Pad::None)
}
const fn num0(numeric: Numeric) -> Item<'static> {
Item::Numeric(numeric, Pad::Zero)
}
const fn nums(numeric: Numeric) -> Item<'static> {
Item::Numeric(numeric, Pad::Space)
}
const fn fixed(fixed: Fixed) -> Item<'static> {
Item::Fixed(fixed)
}
const fn internal_fixed(val: InternalInternal) -> Item<'static> {
Item::Fixed(Fixed::Internal(InternalFixed { val }))
}
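// Illustrative note (not part of the original source): a strftime-style pattern such as
// "%Y-%m-%d" is conceptually lowered into a sequence of these items, roughly
// `[num0(Numeric::Year), Item::Literal("-"), num0(Numeric::Month), Item::Literal("-"),
// num0(Numeric::Day)]`, which the formatter and parser then process one item at a time.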
impl Item<'_> {
/// Convert items that contain a reference to the format string into an owned variant.
#[cfg(any(feature = "alloc", feature = "std"))]
pub fn to_owned(self) -> Item<'static> {
match self {
Item::Literal(s) => Item::OwnedLiteral(Box::from(s)),
Item::Space(s) => Item::OwnedSpace(Box::from(s)),
Item::Numeric(n, p) => Item::Numeric(n, p),
Item::Fixed(f) => Item::Fixed(f),
Item::OwnedLiteral(l) => Item::OwnedLiteral(l),
Item::OwnedSpace(s) => Item::OwnedSpace(s),
Item::Error => Item::Error,
}
}
}
/// An error from the `parse` function.
#[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)]
pub struct ParseError(ParseErrorKind);
impl ParseError {
/// The category of parse error
pub const fn kind(&self) -> ParseErrorKind {
self.0
}
}
/// The category of parse error
#[allow(clippy::manual_non_exhaustive)]
#[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)]
pub enum ParseErrorKind {
/// Given field is out of permitted range.
OutOfRange,
/// There is no possible date and time value with given set of fields.
///
/// This does not include the out-of-range conditions, which are trivially invalid.
/// It includes the case that there are one or more fields that are inconsistent to each other.
Impossible,
/// Given set of fields is not enough to make a requested date and time value.
///
/// Note that there *may* be a case that given fields constrain the possible values so much
/// that there is a unique possible value. Chrono only tries to be correct for
/// most useful sets of fields however, as such constraint solving can be expensive.
NotEnough,
/// The input string has some invalid character sequence for given formatting items.
Invalid,
/// The input string has been prematurely ended.
TooShort,
/// All formatting items have been read but there is a remaining input.
TooLong,
/// There was an error on the formatting string, or there were non-supported formatting items.
BadFormat,
// TODO: Change this to `#[non_exhaustive]` (on the enum) with the next breaking release.
#[doc(hidden)]
__Nonexhaustive,
}
/// Same as `Result<T, ParseError>`.
pub type ParseResult<T> = Result<T, ParseError>;
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.0 {
ParseErrorKind::OutOfRange => write!(f, "input is out of range"),
ParseErrorKind::Impossible => write!(f, "no possible date and time matching input"),
ParseErrorKind::NotEnough => write!(f, "input is not enough for unique date and time"),
ParseErrorKind::Invalid => write!(f, "input contains invalid characters"),
ParseErrorKind::TooShort => write!(f, "premature end of input"),
ParseErrorKind::TooLong => write!(f, "trailing input"),
ParseErrorKind::BadFormat => write!(f, "bad or unsupported format string"),
_ => unreachable!(),
}
}
}
#[cfg(feature = "std")]
impl Error for ParseError {
#[allow(deprecated)]
fn description(&self) -> &str {
"parser error, see to_string() for details"
}
}
// to be used in this module and submodules
pub(crate) const OUT_OF_RANGE: ParseError = ParseError(ParseErrorKind::OutOfRange);
const IMPOSSIBLE: ParseError = ParseError(ParseErrorKind::Impossible);
const NOT_ENOUGH: ParseError = ParseError(ParseErrorKind::NotEnough);
const INVALID: ParseError = ParseError(ParseErrorKind::Invalid);
const TOO_SHORT: ParseError = ParseError(ParseErrorKind::TooShort);
pub(crate) const TOO_LONG: ParseError = ParseError(ParseErrorKind::TooLong);
const BAD_FORMAT: ParseError = ParseError(ParseErrorKind::BadFormat);
// this implementation is here only because we need some private code from `scan`
/// Parsing a `str` into a `Weekday` uses the format [`%A`](./format/strftime/index.html).
///
/// # Example
///
/// ```
/// use chrono::Weekday;
///
/// assert_eq!("Sunday".parse::<Weekday>(), Ok(Weekday::Sun));
/// assert!("any day".parse::<Weekday>().is_err());
/// ```
///
/// The parsing is case-insensitive.
///
/// ```
/// # use chrono::Weekday;
/// assert_eq!("mON".parse::<Weekday>(), Ok(Weekday::Mon));
/// ```
///
/// Only the shortest form (e.g. `sun`) and the longest form (e.g. `sunday`) are accepted.
///
/// ```
/// # use chrono::Weekday;
/// assert!("thurs".parse::<Weekday>().is_err());
/// ```
impl FromStr for Weekday {
type Err = ParseWeekdayError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Ok(("", w)) = scan::short_or_long_weekday(s) {
Ok(w)
} else {
Err(ParseWeekdayError { _dummy: () })
}
}
}
/// Parsing a `str` into a `Month` uses the format [`%B`](./format/strftime/index.html).
///
/// # Example
///
/// ```
/// use chrono::Month;
///
/// assert_eq!("January".parse::<Month>(), Ok(Month::January));
/// assert!("any day".parse::<Month>().is_err());
/// ```
///
/// The parsing is case-insensitive.
///
/// ```
/// # use chrono::Month;
/// assert_eq!("fEbruARy".parse::<Month>(), Ok(Month::February));
/// ```
///
/// Only the shortest form (e.g. `jan`) and the longest form (e.g. `january`) are accepted.
///
/// ```
/// # use chrono::Month;
/// assert!("septem".parse::<Month>().is_err());
/// assert!("Augustin".parse::<Month>().is_err());
/// ```
impl FromStr for Month {
type Err = ParseMonthError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Ok(("", w)) = scan::short_or_long_month0(s) {
match w {
0 => Ok(Month::January),
1 => Ok(Month::February),
2 => Ok(Month::March),
3 => Ok(Month::April),
4 => Ok(Month::May),
5 => Ok(Month::June),
6 => Ok(Month::July),
7 => Ok(Month::August),
8 => Ok(Month::September),
9 => Ok(Month::October),
10 => Ok(Month::November),
11 => Ok(Month::December),
_ => Err(ParseMonthError { _dummy: () }),
}
} else {
Err(ParseMonthError { _dummy: () })
}
}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,431 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
/*!
* Various scanning routines for the parser.
*/
use super::{INVALID, OUT_OF_RANGE, ParseResult, TOO_SHORT};
use crate::Weekday;
/// Tries to parse the non-negative number from `min` to `max` digits.
///
/// The absence of any digits is an unconditional error.
/// If there are more than `max` digits, only the first `max` digits are consumed.
/// Any number that does not fit in `i64` is an error.
#[inline]
pub(super) fn number(s: &str, min: usize, max: usize) -> ParseResult<(&str, i64)> {
assert!(min <= max);
// We are only interested in ascii numbers, so we can work with the `str` as bytes. We stop on
// the first non-numeric byte, which may be another ASCII character or the beginning of a
// multi-byte UTF-8 character.
let bytes = s.as_bytes();
if bytes.len() < min {
return Err(TOO_SHORT);
}
let mut n = 0i64;
for (i, c) in bytes.iter().take(max).cloned().enumerate() {
// cloned() = copied()
if !c.is_ascii_digit() {
if i < min {
return Err(INVALID);
} else {
return Ok((&s[i..], n));
}
}
n = match n.checked_mul(10).and_then(|n| n.checked_add((c - b'0') as i64)) {
Some(n) => n,
None => return Err(OUT_OF_RANGE),
};
}
Ok((&s[core::cmp::min(max, bytes.len())..], n))
}
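// Worked example for `number` above (illustrative, not part of the original source):
// `number("2014-07", 1, 4)` stops after the first four digits and yields `Ok(("-07", 2014))`,
// while `number("x1", 1, 4)` fails with `INVALID` because no digit is found at the start.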
/// Tries to consume at least one digit as a fractional second.
/// Returns the number of whole nanoseconds (0--999,999,999).
pub(super) fn nanosecond(s: &str) -> ParseResult<(&str, i64)> {
// record the number of digits consumed for later scaling.
let origlen = s.len();
let (s, v) = number(s, 1, 9)?;
let consumed = origlen - s.len();
// scale the number accordingly.
static SCALE: [i64; 10] =
[0, 100_000_000, 10_000_000, 1_000_000, 100_000, 10_000, 1_000, 100, 10, 1];
let v = v.checked_mul(SCALE[consumed]).ok_or(OUT_OF_RANGE)?;
// if there are more than 9 digits, skip next digits.
let s = s.trim_start_matches(|c: char| c.is_ascii_digit());
Ok((s, v))
}
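// Worked example for `nanosecond` above (illustrative): parsing the fractional digits "25"
// consumes two digits, scales by `SCALE[2]`, and yields `Ok(("", 250_000_000))`, i.e. 0.25 s
// expressed in whole nanoseconds.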
/// Tries to consume a fixed number of digits as a fractional second.
/// Returns the number of whole nanoseconds (0--999,999,999).
pub(super) fn nanosecond_fixed(s: &str, digits: usize) -> ParseResult<(&str, i64)> {
// record the number of digits consumed for later scaling.
let (s, v) = number(s, digits, digits)?;
// scale the number accordingly.
static SCALE: [i64; 10] =
[0, 100_000_000, 10_000_000, 1_000_000, 100_000, 10_000, 1_000, 100, 10, 1];
let v = v.checked_mul(SCALE[digits]).ok_or(OUT_OF_RANGE)?;
Ok((s, v))
}
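// Worked example for `nanosecond_fixed` above (illustrative): `nanosecond_fixed("250ms", 3)`
// reads exactly three digits and yields `Ok(("ms", 250_000_000))`; an input with fewer than
// three leading digits is rejected by `number` with `TOO_SHORT` or `INVALID`.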
/// Tries to parse the month index (0 through 11) with the first three ASCII letters.
pub(super) fn short_month0(s: &str) -> ParseResult<(&str, u8)> {
if s.len() < 3 {
return Err(TOO_SHORT);
}
let buf = s.as_bytes();
let month0 = match (buf[0] | 32, buf[1] | 32, buf[2] | 32) {
(b'j', b'a', b'n') => 0,
(b'f', b'e', b'b') => 1,
(b'm', b'a', b'r') => 2,
(b'a', b'p', b'r') => 3,
(b'm', b'a', b'y') => 4,
(b'j', b'u', b'n') => 5,
(b'j', b'u', b'l') => 6,
(b'a', b'u', b'g') => 7,
(b's', b'e', b'p') => 8,
(b'o', b'c', b't') => 9,
(b'n', b'o', b'v') => 10,
(b'd', b'e', b'c') => 11,
_ => return Err(INVALID),
};
Ok((&s[3..], month0))
}
/// Tries to parse the weekday with the first three ASCII letters.
pub(super) fn short_weekday(s: &str) -> ParseResult<(&str, Weekday)> {
if s.len() < 3 {
return Err(TOO_SHORT);
}
let buf = s.as_bytes();
let weekday = match (buf[0] | 32, buf[1] | 32, buf[2] | 32) {
(b'm', b'o', b'n') => Weekday::Mon,
(b't', b'u', b'e') => Weekday::Tue,
(b'w', b'e', b'd') => Weekday::Wed,
(b't', b'h', b'u') => Weekday::Thu,
(b'f', b'r', b'i') => Weekday::Fri,
(b's', b'a', b't') => Weekday::Sat,
(b's', b'u', b'n') => Weekday::Sun,
_ => return Err(INVALID),
};
Ok((&s[3..], weekday))
}
/// Tries to parse the month index (0 through 11) with short or long month names.
/// It prefers long month names to short month names when both are possible.
pub(super) fn short_or_long_month0(s: &str) -> ParseResult<(&str, u8)> {
// lowercased month names, minus first three chars
static LONG_MONTH_SUFFIXES: [&[u8]; 12] = [
b"uary", b"ruary", b"ch", b"il", b"", b"e", b"y", b"ust", b"tember", b"ober", b"ember",
b"ember",
];
let (mut s, month0) = short_month0(s)?;
// tries to consume the suffix if possible
let suffix = LONG_MONTH_SUFFIXES[month0 as usize];
if s.len() >= suffix.len() && s.as_bytes()[..suffix.len()].eq_ignore_ascii_case(suffix) {
s = &s[suffix.len()..];
}
Ok((s, month0))
}
/// Tries to parse the weekday with short or long weekday names.
/// It prefers long weekday names to short weekday names when both are possible.
pub(super) fn short_or_long_weekday(s: &str) -> ParseResult<(&str, Weekday)> {
// lowercased weekday names, minus first three chars
static LONG_WEEKDAY_SUFFIXES: [&[u8]; 7] =
[b"day", b"sday", b"nesday", b"rsday", b"day", b"urday", b"day"];
let (mut s, weekday) = short_weekday(s)?;
// tries to consume the suffix if possible
let suffix = LONG_WEEKDAY_SUFFIXES[weekday.num_days_from_monday() as usize];
if s.len() >= suffix.len() && s.as_bytes()[..suffix.len()].eq_ignore_ascii_case(suffix) {
s = &s[suffix.len()..];
}
Ok((s, weekday))
}
/// Tries to consume exactly one given character.
pub(super) fn char(s: &str, c1: u8) -> ParseResult<&str> {
match s.as_bytes().first() {
Some(&c) if c == c1 => Ok(&s[1..]),
Some(_) => Err(INVALID),
None => Err(TOO_SHORT),
}
}
/// Tries to consume one or more whitespace characters.
pub(super) fn space(s: &str) -> ParseResult<&str> {
let s_ = s.trim_start();
if s_.len() < s.len() {
Ok(s_)
} else if s.is_empty() {
Err(TOO_SHORT)
} else {
Err(INVALID)
}
}
/// Consumes any number (including zero) of colons or spaces.
pub(crate) fn colon_or_space(s: &str) -> ParseResult<&str> {
Ok(s.trim_start_matches(|c: char| c == ':' || c.is_whitespace()))
}
/// Parse a timezone from `s` and return the offset in seconds.
///
/// The `consume_colon` function is used to parse a mandatory or optional `:`
/// separator between hours offset and minutes offset.
///
/// The `allow_missing_minutes` flag allows the timezone minutes offset to be
/// missing from `s`.
///
/// The `allow_tz_minus_sign` flag allows the timezone offset negative character
/// to also be `−` MINUS SIGN (U+2212) in addition to the typical
/// ASCII-compatible `-` HYPHEN-MINUS (U+2D).
/// This is part of [RFC 3339 & ISO 8601].
///
/// [RFC 3339 & ISO 8601]: https://en.wikipedia.org/w/index.php?title=ISO_8601&oldid=1114309368#Time_offsets_from_UTC
pub(crate) fn timezone_offset<F>(
mut s: &str,
mut consume_colon: F,
allow_zulu: bool,
allow_missing_minutes: bool,
allow_tz_minus_sign: bool,
) -> ParseResult<(&str, i32)>
where
F: FnMut(&str) -> ParseResult<&str>,
{
if allow_zulu {
if let Some(&b'Z' | &b'z') = s.as_bytes().first() {
return Ok((&s[1..], 0));
}
}
const fn digits(s: &str) -> ParseResult<(u8, u8)> {
let b = s.as_bytes();
if b.len() < 2 { Err(TOO_SHORT) } else { Ok((b[0], b[1])) }
}
let negative = match s.chars().next() {
Some('+') => {
// PLUS SIGN (U+2B)
s = &s['+'.len_utf8()..];
false
}
Some('-') => {
// HYPHEN-MINUS (U+2D)
s = &s['-'.len_utf8()..];
true
}
Some('−') => {
// MINUS SIGN (U+2212)
if !allow_tz_minus_sign {
return Err(INVALID);
}
s = &s['−'.len_utf8()..];
true
}
Some(_) => return Err(INVALID),
None => return Err(TOO_SHORT),
};
// hours (00--99)
let hours = match digits(s)? {
(h1 @ b'0'..=b'9', h2 @ b'0'..=b'9') => i32::from((h1 - b'0') * 10 + (h2 - b'0')),
_ => return Err(INVALID),
};
s = &s[2..];
// colons (and possibly other separators)
s = consume_colon(s)?;
// minutes (00--59)
// if the next two items are digits then we have to add minutes
let minutes = if let Ok(ds) = digits(s) {
match ds {
(m1 @ b'0'..=b'5', m2 @ b'0'..=b'9') => i32::from((m1 - b'0') * 10 + (m2 - b'0')),
(b'6'..=b'9', b'0'..=b'9') => return Err(OUT_OF_RANGE),
_ => return Err(INVALID),
}
} else if allow_missing_minutes {
0
} else {
return Err(TOO_SHORT);
};
s = match s.len() {
len if len >= 2 => &s[2..],
0 => s,
_ => return Err(TOO_SHORT),
};
let seconds = hours * 3600 + minutes * 60;
Ok((s, if negative { -seconds } else { seconds }))
}
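// Worked example for `timezone_offset` above (illustrative): with `colon_or_space` as the
// separator parser, `timezone_offset("+09:30 rest", colon_or_space, true, false, false)`
// yields `Ok((" rest", 34_200))`, i.e. 9 * 3600 + 30 * 60 seconds east of UTC.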
/// Same as `timezone_offset` but also allows for RFC 2822 legacy timezones.
/// May return `None` which indicates an insufficient offset data (i.e. `-0000`).
/// See [RFC 2822 Section 4.3].
///
/// [RFC 2822 Section 4.3]: https://tools.ietf.org/html/rfc2822#section-4.3
pub(super) fn timezone_offset_2822(s: &str) -> ParseResult<(&str, i32)> {
// tries to parse legacy time zone names
let upto = s.as_bytes().iter().position(|&c| !c.is_ascii_alphabetic()).unwrap_or(s.len());
if upto > 0 {
let name = &s.as_bytes()[..upto];
let s = &s[upto..];
let offset_hours = |o| Ok((s, o * 3600));
// RFC 2822 requires support for some named North American timezones, a small subset of all
// named timezones.
if name.eq_ignore_ascii_case(b"gmt")
|| name.eq_ignore_ascii_case(b"ut")
|| name.eq_ignore_ascii_case(b"z")
{
return offset_hours(0);
} else if name.eq_ignore_ascii_case(b"edt") {
return offset_hours(-4);
} else if name.eq_ignore_ascii_case(b"est") || name.eq_ignore_ascii_case(b"cdt") {
return offset_hours(-5);
} else if name.eq_ignore_ascii_case(b"cst") || name.eq_ignore_ascii_case(b"mdt") {
return offset_hours(-6);
} else if name.eq_ignore_ascii_case(b"mst") || name.eq_ignore_ascii_case(b"pdt") {
return offset_hours(-7);
} else if name.eq_ignore_ascii_case(b"pst") {
return offset_hours(-8);
} else if name.len() == 1 {
if let b'a'..=b'i' | b'k'..=b'y' | b'A'..=b'I' | b'K'..=b'Y' = name[0] {
// recommended by RFC 2822: consume but treat it as -0000
return Ok((s, 0));
}
}
Err(INVALID)
} else {
timezone_offset(s, |s| Ok(s), false, false, false)
}
}
/// Tries to consume an RFC2822 comment, including any preceding whitespace.
///
/// Returns the remaining string after the closing parenthesis.
pub(super) fn comment_2822(s: &str) -> ParseResult<(&str, ())> {
use CommentState::*;
let s = s.trim_start();
let mut state = Start;
for (i, c) in s.bytes().enumerate() {
state = match (state, c) {
(Start, b'(') => Next(1),
(Next(1), b')') => return Ok((&s[i + 1..], ())),
(Next(depth), b'\\') => Escape(depth),
(Next(depth), b'(') => Next(depth + 1),
(Next(depth), b')') => Next(depth - 1),
(Next(depth), _) | (Escape(depth), _) => Next(depth),
_ => return Err(INVALID),
};
}
Err(TOO_SHORT)
}
enum CommentState {
Start,
Next(usize),
Escape(usize),
}
#[cfg(test)]
mod tests {
use super::{
comment_2822, nanosecond, nanosecond_fixed, short_or_long_month0, short_or_long_weekday,
timezone_offset_2822,
};
use crate::Weekday;
use crate::format::{INVALID, TOO_SHORT};
#[test]
fn test_rfc2822_comments() {
let testdata = [
("", Err(TOO_SHORT)),
(" ", Err(TOO_SHORT)),
("x", Err(INVALID)),
("(", Err(TOO_SHORT)),
("()", Ok("")),
(" \r\n\t()", Ok("")),
("() ", Ok(" ")),
("()z", Ok("z")),
("(x)", Ok("")),
("(())", Ok("")),
("((()))", Ok("")),
("(x(x(x)x)x)", Ok("")),
("( x ( x ( x ) x ) x )", Ok("")),
(r"(\)", Err(TOO_SHORT)),
(r"(\()", Ok("")),
(r"(\))", Ok("")),
(r"(\\)", Ok("")),
("(()())", Ok("")),
("( x ( x ) x ( x ) x )", Ok("")),
];
for (test_in, expected) in testdata.iter() {
let actual = comment_2822(test_in).map(|(s, _)| s);
assert_eq!(
*expected, actual,
"{test_in:?} expected to produce {expected:?}, but produced {actual:?}."
);
}
}
#[test]
fn test_timezone_offset_2822() {
assert_eq!(timezone_offset_2822("cSt").unwrap(), ("", -21600));
assert_eq!(timezone_offset_2822("pSt").unwrap(), ("", -28800));
assert_eq!(timezone_offset_2822("mSt").unwrap(), ("", -25200));
assert_eq!(timezone_offset_2822("-1551").unwrap(), ("", -57060));
assert_eq!(timezone_offset_2822("Gp"), Err(INVALID));
}
#[test]
fn test_short_or_long_month0() {
assert_eq!(short_or_long_month0("JUn").unwrap(), ("", 5));
assert_eq!(short_or_long_month0("mAy").unwrap(), ("", 4));
assert_eq!(short_or_long_month0("AuG").unwrap(), ("", 7));
assert_eq!(short_or_long_month0("Aprâ").unwrap(), ("â", 3));
assert_eq!(short_or_long_month0("JUl").unwrap(), ("", 6));
assert_eq!(short_or_long_month0("mAr").unwrap(), ("", 2));
assert_eq!(short_or_long_month0("Jan").unwrap(), ("", 0));
}
#[test]
fn test_short_or_long_weekday() {
assert_eq!(short_or_long_weekday("sAtu").unwrap(), ("u", Weekday::Sat));
assert_eq!(short_or_long_weekday("thu").unwrap(), ("", Weekday::Thu));
}
#[test]
fn test_nanosecond_fixed() {
assert_eq!(nanosecond_fixed("", 0usize).unwrap(), ("", 0));
assert!(nanosecond_fixed("", 1usize).is_err());
}
#[test]
fn test_nanosecond() {
assert_eq!(nanosecond("2Ù").unwrap(), ("Ù", 200000000));
assert_eq!(nanosecond("8").unwrap(), ("", 800000000));
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,735 @@
//! # Chrono: Date and Time for Rust
//!
//! Chrono aims to provide all functionality needed to do correct operations on dates and times in
//! the [proleptic Gregorian calendar]:
//!
//! * The [`DateTime`] type is timezone-aware by default, with separate timezone-naive types.
//! * Operations that may produce an invalid or ambiguous date and time return `Option` or
//! [`MappedLocalTime`].
//! * Configurable parsing and formatting with a `strftime` inspired date and time formatting
//! syntax.
//! * The [`Local`] timezone works with the current timezone of the OS.
//! * Types and operations are implemented to be reasonably efficient.
//!
//! Timezone data is not shipped with chrono by default to limit binary sizes. Use the companion
//! crate [Chrono-TZ] or [`tzfile`] for full timezone support.
//!
//! [proleptic Gregorian calendar]: https://en.wikipedia.org/wiki/Proleptic_Gregorian_calendar
//! [Chrono-TZ]: https://crates.io/crates/chrono-tz
//! [`tzfile`]: https://crates.io/crates/tzfile
//!
//! ### Features
//!
//! Chrono supports various runtime environments and operating systems, and has several features
//! that may be enabled or disabled.
//!
//! Default features:
//!
//! - `alloc`: Enable features that depend on allocation (primarily string formatting).
//! - `std`: Enables functionality that depends on the standard library. This is a superset of
//! `alloc` and adds interoperation with standard library types and traits.
//! - `clock`: Enables reading the local timezone (`Local`). This is a superset of `now`.
//! - `now`: Enables reading the system time (`now`).
//! - `wasmbind`: Interface with the JS Date API for the `wasm32` target.
//!
//! Optional features:
//!
//! - `serde`: Enable serialization/deserialization via [serde].
//! - `rkyv`: Deprecated, use the `rkyv-*` features.
//! - `rkyv-16`: Enable serialization/deserialization via [rkyv],
//! using 16-bit integers for integral `*size` types.
//! - `rkyv-32`: Enable serialization/deserialization via [rkyv],
//! using 32-bit integers for integral `*size` types.
//! - `rkyv-64`: Enable serialization/deserialization via [rkyv],
//! using 64-bit integers for integral `*size` types.
//! - `rkyv-validation`: Enable rkyv validation support using `bytecheck`.
//! - `arbitrary`: Construct arbitrary instances of a type with the Arbitrary crate.
//! - `unstable-locales`: Enable localization. This adds various methods with a `_localized` suffix.
//! The implementation and API may change or even be removed in a patch release. Feedback welcome.
//! - `oldtime`: This feature no longer has any effect; it used to offer compatibility with the
//! `time` 0.1 crate.
//!
//! Note: The `rkyv{,-16,-32,-64}` features are mutually exclusive.
//!
//! See the [cargo docs] for examples of specifying features.
//!
//! [serde]: https://github.com/serde-rs/serde
//! [rkyv]: https://github.com/rkyv/rkyv
//! [cargo docs]: https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#choosing-features
//!
//! ## Overview
//!
//! ### Time delta / Duration
//!
//! Chrono has a [`TimeDelta`] type to represent the magnitude of a time span. This is an "accurate"
//! duration represented as seconds and nanoseconds, and does not represent "nominal" components
//! such as days or months.
//!
//! The [`TimeDelta`] type was previously named `Duration` (and is still available as a type alias
//! with that name). A notable difference with the similar [`core::time::Duration`] is that it is a
//! signed value instead of unsigned.
//!
//! Chrono currently only supports a small number of operations with [`core::time::Duration`].
//! You can convert between both types with the [`TimeDelta::from_std`] and [`TimeDelta::to_std`]
//! methods.
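//!
//! A brief sketch of that signed/unsigned difference (using the fallible constructors also used
//! in the examples below):
//!
//! ```
//! use chrono::TimeDelta;
//!
//! // `TimeDelta` is signed, so a negative span is representable...
//! let delta = TimeDelta::try_seconds(-90).unwrap();
//! // ...but it cannot be converted into the unsigned `core::time::Duration`.
//! assert!(delta.to_std().is_err());
//! // The positive counterpart round-trips through `from_std`.
//! assert_eq!(TimeDelta::from_std(core::time::Duration::from_secs(90)).unwrap(), -delta);
//! ```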
//!
//! ### Date and Time
//!
//! Chrono provides a [`DateTime`] type to represent a date and a time in a timezone.
//!
//! For more abstract moment-in-time tracking such as internal timekeeping that is unconcerned with
//! timezones, consider [`std::time::SystemTime`], which tracks your system clock, or
//! [`std::time::Instant`], which is an opaque but monotonically-increasing representation of a
//! moment in time.
//!
//! [`DateTime`] is timezone-aware and must be constructed from a [`TimeZone`] object, which defines
//! how the local date is converted to and back from the UTC date.
//! There are three well-known [`TimeZone`] implementations:
//!
//! * [`Utc`] specifies the UTC time zone. It is most efficient.
//!
//! * [`Local`] specifies the system local time zone.
//!
//! * [`FixedOffset`] specifies an arbitrary, fixed time zone such as UTC+09:00 or UTC-10:30.
//! This often results from the parsed textual date and time. Since it stores the most information
//! and does not depend on the system environment, you would want to normalize other `TimeZone`s
//! into this type.
//!
//! [`DateTime`]s with different [`TimeZone`] types are distinct and do not mix, but can be
//! converted to each other using the [`DateTime::with_timezone`] method.
//!
//! You can get the current date and time in the UTC time zone ([`Utc::now()`]) or in the local time
//! zone ([`Local::now()`]).
//!
//! ```
//! # #[cfg(feature = "now")] {
//! use chrono::prelude::*;
//!
//! let utc: DateTime<Utc> = Utc::now(); // e.g. `2014-11-28T12:45:59.324310806Z`
//! # let _ = utc;
//! # }
//! ```
//!
//! ```
//! # #[cfg(feature = "clock")] {
//! use chrono::prelude::*;
//!
//! let local: DateTime<Local> = Local::now(); // e.g. `2014-11-28T21:45:59.324310806+09:00`
//! # let _ = local;
//! # }
//! ```
//!
//! Alternatively, you can create your own date and time. This is a bit verbose due to Rust's lack
//! of function and method overloading, but in turn we get a rich combination of initialization
//! methods.
//!
//! ```
//! use chrono::offset::MappedLocalTime;
//! use chrono::prelude::*;
//!
//! # fn doctest() -> Option<()> {
//!
//! let dt = Utc.with_ymd_and_hms(2014, 7, 8, 9, 10, 11).unwrap(); // `2014-07-08T09:10:11Z`
//! assert_eq!(
//! dt,
//! NaiveDate::from_ymd_opt(2014, 7, 8)?
//! .and_hms_opt(9, 10, 11)?
//! .and_utc()
//! );
//!
//! // July 8 is the 189th day of the year 2014 (`o` for "ordinal")
//! assert_eq!(dt, NaiveDate::from_yo_opt(2014, 189)?.and_hms_opt(9, 10, 11)?.and_utc());
//! // July 8 is Tuesday in ISO week 28 of the year 2014.
//! assert_eq!(
//! dt,
//! NaiveDate::from_isoywd_opt(2014, 28, Weekday::Tue)?.and_hms_opt(9, 10, 11)?.and_utc()
//! );
//!
//! let dt = NaiveDate::from_ymd_opt(2014, 7, 8)?
//! .and_hms_milli_opt(9, 10, 11, 12)?
//! .and_utc(); // `2014-07-08T09:10:11.012Z`
//! assert_eq!(
//! dt,
//! NaiveDate::from_ymd_opt(2014, 7, 8)?
//! .and_hms_micro_opt(9, 10, 11, 12_000)?
//! .and_utc()
//! );
//! assert_eq!(
//! dt,
//! NaiveDate::from_ymd_opt(2014, 7, 8)?
//! .and_hms_nano_opt(9, 10, 11, 12_000_000)?
//! .and_utc()
//! );
//!
//! // dynamic verification
//! assert_eq!(
//! Utc.with_ymd_and_hms(2014, 7, 8, 21, 15, 33),
//! MappedLocalTime::Single(
//! NaiveDate::from_ymd_opt(2014, 7, 8)?.and_hms_opt(21, 15, 33)?.and_utc()
//! )
//! );
//! assert_eq!(Utc.with_ymd_and_hms(2014, 7, 8, 80, 15, 33), MappedLocalTime::None);
//! assert_eq!(Utc.with_ymd_and_hms(2014, 7, 38, 21, 15, 33), MappedLocalTime::None);
//!
//! # #[cfg(feature = "clock")] {
//! // other time zone objects can be used to construct a local datetime.
//! // obviously, `local_dt` is normally different from `dt`, but `fixed_dt` should be identical.
//! let local_dt = Local
//! .from_local_datetime(
//! &NaiveDate::from_ymd_opt(2014, 7, 8).unwrap().and_hms_milli_opt(9, 10, 11, 12).unwrap(),
//! )
//! .unwrap();
//! let fixed_dt = FixedOffset::east_opt(9 * 3600)
//! .unwrap()
//! .from_local_datetime(
//! &NaiveDate::from_ymd_opt(2014, 7, 8)
//! .unwrap()
//! .and_hms_milli_opt(18, 10, 11, 12)
//! .unwrap(),
//! )
//! .unwrap();
//! assert_eq!(dt, fixed_dt);
//! # let _ = local_dt;
//! # }
//! # Some(())
//! # }
//! # doctest().unwrap();
//! ```
//!
//! Various properties are available to the date and time, and can be altered individually. Most of
//! them are defined in the traits [`Datelike`] and [`Timelike`] which you should `use` before.
//! Addition and subtraction is also supported.
//! The following illustrates most supported operations to the date and time:
//!
//! ```rust
//! use chrono::prelude::*;
//! use chrono::TimeDelta;
//!
//! // assume this returned `2014-11-28T21:45:59.324310806+09:00`:
//! let dt = FixedOffset::east_opt(9 * 3600)
//! .unwrap()
//! .from_local_datetime(
//! &NaiveDate::from_ymd_opt(2014, 11, 28)
//! .unwrap()
//! .and_hms_nano_opt(21, 45, 59, 324310806)
//! .unwrap(),
//! )
//! .unwrap();
//!
//! // property accessors
//! assert_eq!((dt.year(), dt.month(), dt.day()), (2014, 11, 28));
//! assert_eq!((dt.month0(), dt.day0()), (10, 27)); // for unfortunate souls
//! assert_eq!((dt.hour(), dt.minute(), dt.second()), (21, 45, 59));
//! assert_eq!(dt.weekday(), Weekday::Fri);
//! assert_eq!(dt.weekday().number_from_monday(), 5); // Mon=1, ..., Sun=7
//! assert_eq!(dt.ordinal(), 332); // the day of year
//! assert_eq!(dt.num_days_from_ce(), 735565); // the number of days from and including Jan 1, 1
//!
//! // time zone accessor and manipulation
//! assert_eq!(dt.offset().fix().local_minus_utc(), 9 * 3600);
//! assert_eq!(dt.timezone(), FixedOffset::east_opt(9 * 3600).unwrap());
//! assert_eq!(
//! dt.with_timezone(&Utc),
//! NaiveDate::from_ymd_opt(2014, 11, 28)
//! .unwrap()
//! .and_hms_nano_opt(12, 45, 59, 324310806)
//! .unwrap()
//! .and_utc()
//! );
//!
//! // a sample of property manipulations (validates dynamically)
//! assert_eq!(dt.with_day(29).unwrap().weekday(), Weekday::Sat); // 2014-11-29 is Saturday
//! assert_eq!(dt.with_day(32), None);
//! assert_eq!(dt.with_year(-300).unwrap().num_days_from_ce(), -109606); // November 29, 301 BCE
//!
//! // arithmetic operations
//! let dt1 = Utc.with_ymd_and_hms(2014, 11, 14, 8, 9, 10).unwrap();
//! let dt2 = Utc.with_ymd_and_hms(2014, 11, 14, 10, 9, 8).unwrap();
//! assert_eq!(dt1.signed_duration_since(dt2), TimeDelta::try_seconds(-2 * 3600 + 2).unwrap());
//! assert_eq!(dt2.signed_duration_since(dt1), TimeDelta::try_seconds(2 * 3600 - 2).unwrap());
//! assert_eq!(
//! Utc.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap()
//! + TimeDelta::try_seconds(1_000_000_000).unwrap(),
//! Utc.with_ymd_and_hms(2001, 9, 9, 1, 46, 40).unwrap()
//! );
//! assert_eq!(
//! Utc.with_ymd_and_hms(1970, 1, 1, 0, 0, 0).unwrap()
//! - TimeDelta::try_seconds(1_000_000_000).unwrap(),
//! Utc.with_ymd_and_hms(1938, 4, 24, 22, 13, 20).unwrap()
//! );
//! ```
//!
//! ### Formatting and Parsing
//!
//! Formatting is done via the [`format`](DateTime::format()) method, whose format syntax is
//! equivalent to the familiar `strftime` format.
//!
//! See [`format::strftime`](format::strftime#specifiers) documentation for full syntax and list of
//! specifiers.
//!
//! The default `to_string` method and `{:?}` specifier also give a reasonable representation.
//! Chrono also provides [`to_rfc2822`](DateTime::to_rfc2822) and
//! [`to_rfc3339`](DateTime::to_rfc3339) methods for well-known formats.
//!
//! Chrono now also provides date formatting in almost any language without the help of an
//! additional C library. This functionality is under the feature `unstable-locales`:
//!
//! ```toml
//! chrono = { version = "0.4", features = ["unstable-locales"] }
//! ```
//!
//! The `unstable-locales` feature requires and implies at least the `alloc` feature.
//!
//! ```rust
//! # #[allow(unused_imports)]
//! use chrono::prelude::*;
//!
//! # #[cfg(all(feature = "unstable-locales", feature = "alloc"))]
//! # fn test() {
//! let dt = Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap();
//! assert_eq!(dt.format("%Y-%m-%d %H:%M:%S").to_string(), "2014-11-28 12:00:09");
//! assert_eq!(dt.format("%a %b %e %T %Y").to_string(), "Fri Nov 28 12:00:09 2014");
//! assert_eq!(
//! dt.format_localized("%A %e %B %Y, %T", Locale::fr_BE).to_string(),
//! "vendredi 28 novembre 2014, 12:00:09"
//! );
//!
//! assert_eq!(dt.format("%a %b %e %T %Y").to_string(), dt.format("%c").to_string());
//! assert_eq!(dt.to_string(), "2014-11-28 12:00:09 UTC");
//! assert_eq!(dt.to_rfc2822(), "Fri, 28 Nov 2014 12:00:09 +0000");
//! assert_eq!(dt.to_rfc3339(), "2014-11-28T12:00:09+00:00");
//! assert_eq!(format!("{:?}", dt), "2014-11-28T12:00:09Z");
//!
//! // Note that milli/nanoseconds are only printed if they are non-zero
//! let dt_nano = NaiveDate::from_ymd_opt(2014, 11, 28)
//! .unwrap()
//! .and_hms_nano_opt(12, 0, 9, 1)
//! .unwrap()
//! .and_utc();
//! assert_eq!(format!("{:?}", dt_nano), "2014-11-28T12:00:09.000000001Z");
//! # }
//! # #[cfg(not(all(feature = "unstable-locales", feature = "alloc")))]
//! # fn test() {}
//! # if cfg!(all(feature = "unstable-locales", feature = "alloc")) {
//! # test();
//! # }
//! ```
//!
//! Parsing can be done with two methods:
//!
//! 1. The standard [`FromStr`](std::str::FromStr) trait (and [`parse`](str::parse) method on a
//! string) can be used for parsing `DateTime<FixedOffset>`, `DateTime<Utc>` and
//! `DateTime<Local>` values. This parses what the `{:?}` ([`std::fmt::Debug`] format specifier
//! prints, and requires the offset to be present.
//!
//! 2. [`DateTime::parse_from_str`] parses a date and time with offsets and returns
//! `DateTime<FixedOffset>`. This should be used when the offset is a part of input and the
//! caller cannot guess that. It *cannot* be used when the offset can be missing.
//! [`DateTime::parse_from_rfc2822`] and [`DateTime::parse_from_rfc3339`] are similar but for
//! well-known formats.
//!
//! More detailed control over the parsing process is available via the [`format`](mod@format) module.
//!
//! ```rust
//! use chrono::prelude::*;
//!
//! let dt = Utc.with_ymd_and_hms(2014, 11, 28, 12, 0, 9).unwrap();
//! let fixed_dt = dt.with_timezone(&FixedOffset::east_opt(9 * 3600).unwrap());
//!
//! // method 1
//! assert_eq!("2014-11-28T12:00:09Z".parse::<DateTime<Utc>>(), Ok(dt.clone()));
//! assert_eq!("2014-11-28T21:00:09+09:00".parse::<DateTime<Utc>>(), Ok(dt.clone()));
//! assert_eq!("2014-11-28T21:00:09+09:00".parse::<DateTime<FixedOffset>>(), Ok(fixed_dt.clone()));
//!
//! // method 2
//! assert_eq!(
//! DateTime::parse_from_str("2014-11-28 21:00:09 +09:00", "%Y-%m-%d %H:%M:%S %z"),
//! Ok(fixed_dt.clone())
//! );
//! assert_eq!(
//! DateTime::parse_from_rfc2822("Fri, 28 Nov 2014 21:00:09 +0900"),
//! Ok(fixed_dt.clone())
//! );
//! assert_eq!(DateTime::parse_from_rfc3339("2014-11-28T21:00:09+09:00"), Ok(fixed_dt.clone()));
//!
//! // oops, the year is missing!
//! assert!(DateTime::parse_from_str("Fri Nov 28 12:00:09", "%a %b %e %T %Y").is_err());
//! // oops, the format string does not include the year at all!
//! assert!(DateTime::parse_from_str("Fri Nov 28 12:00:09", "%a %b %e %T").is_err());
//! // oops, the weekday is incorrect!
//! assert!(DateTime::parse_from_str("Sat Nov 28 12:00:09 2014", "%a %b %e %T %Y").is_err());
//! ```
//!
//! Again: See [`format::strftime`](format::strftime#specifiers) documentation for full syntax and
//! list of specifiers.
//!
//! ### Conversion from and to EPOCH timestamps
//!
//! Use [`DateTime::from_timestamp(seconds, nanoseconds)`](DateTime::from_timestamp)
//! to construct a [`DateTime<Utc>`] from a UNIX timestamp
//! (seconds and nanoseconds that have passed since January 1st 1970).
//!
//! Use [`DateTime.timestamp`](DateTime::timestamp) to get the timestamp (in seconds)
//! from a [`DateTime`]. Additionally, you can use
//! [`DateTime.timestamp_subsec_nanos`](DateTime::timestamp_subsec_nanos)
//! to get the number of additional nanoseconds.
//!
//! ```
//! # #[cfg(feature = "alloc")] {
//! // `Utc` is only needed here for the `DateTime<Utc>` type annotation.
//! use chrono::{DateTime, Utc};
//!
//! // Construct a datetime from epoch:
//! let dt: DateTime<Utc> = DateTime::from_timestamp(1_500_000_000, 0).unwrap();
//! assert_eq!(dt.to_rfc2822(), "Fri, 14 Jul 2017 02:40:00 +0000");
//!
//! // Get epoch value from a datetime:
//! let dt = DateTime::parse_from_rfc2822("Fri, 14 Jul 2017 02:40:00 +0000").unwrap();
//! assert_eq!(dt.timestamp(), 1_500_000_000);
//! # }
//! ```
//!
//! ### Naive date and time
//!
//! Chrono provides naive counterparts to `Date`, (non-existent) `Time` and `DateTime` as
//! [`NaiveDate`], [`NaiveTime`] and [`NaiveDateTime`] respectively.
//!
//! They have almost equivalent interfaces to their timezone-aware twins, but are not associated
//! with a time zone and can be quite low-level. They are mostly useful as building blocks for
//! higher-level types.
//!
//! Timezone-aware `DateTime` and `Date` types have two methods returning naive versions:
//! [`naive_local`](DateTime::naive_local) returns a view to the naive local time,
//! and [`naive_utc`](DateTime::naive_utc) returns a view to the naive UTC time.
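//!
//! A minimal sketch of the naive types (construction is fallible, just as for the aware types):
//!
//! ```
//! # #[cfg(feature = "alloc")] {
//! use chrono::NaiveDate;
//!
//! let ndt = NaiveDate::from_ymd_opt(2014, 7, 8).unwrap().and_hms_opt(9, 10, 11).unwrap();
//! assert_eq!(ndt.to_string(), "2014-07-08 09:10:11");
//! // A naive value can be reinterpreted as UTC to obtain a timezone-aware `DateTime<Utc>`.
//! assert_eq!(ndt.and_utc().timestamp(), 1_404_810_611);
//! # }
//! ```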
//!
//! ## Limitations
//!
//! * Only the proleptic Gregorian calendar (i.e. extended to support older dates) is supported.
//! * Date types are limited to about +/- 262,000 years from the common epoch.
//! * Time types are limited to nanosecond accuracy.
//! * Leap seconds can be represented, but Chrono does not fully support them.
//! See [Leap Second Handling](NaiveTime#leap-second-handling).
//!
//! ## Rust version requirements
//!
//! The Minimum Supported Rust Version (MSRV) is currently **Rust 1.61.0**.
//!
//! The MSRV is explicitly tested in CI. It may be bumped in minor releases, but this is not done
//! lightly.
//!
//! ## Relation between chrono and time 0.1
//!
//! Rust first had a `time` module added to `std` in its 0.7 release. It later moved to
//! `libextra`, and then to a `libtime` library shipped alongside the standard library. In 2014
//! work on chrono started in order to provide a full-featured date and time library in Rust.
//! Some improvements from chrono made it into the standard library; notably, `chrono::Duration`
//! was included as `std::time::Duration` ([rust#15934]) in 2014.
//!
//! In preparation of Rust 1.0 at the end of 2014 `libtime` was moved out of the Rust distro and
//! into the `time` crate to eventually be redesigned ([rust#18832], [rust#18858]), like the
//! `num` and `rand` crates. Of course chrono kept its dependency on this `time` crate. `time`
//! started re-exporting `std::time::Duration` during this period. Later, the standard library was
//! changed to have a more limited unsigned `Duration` type ([rust#24920], [RFC 1040]), while the
//! `time` crate kept the full functionality with `time::Duration`. `time::Duration` had been a
//! part of chrono's public API.
//!
//! By 2016 `time` 0.1 lived under the `rust-lang-deprecated` organisation and was not actively
//! maintained ([time#136]). chrono absorbed the platform functionality and `Duration` type of the
//! `time` crate in [chrono#478] (the work started in [chrono#286]). In order to preserve
//! compatibility with downstream crates depending on `time` and `chrono` sharing a `Duration`
//! type, chrono kept depending on time 0.1. chrono offered the option to opt out of the `time`
//! dependency by disabling the `oldtime` feature (swapping it out for an effectively similar
//! chrono type). In 2019, @jhpratt took over maintenance on the `time` crate and released what
//! amounts to a new crate as `time` 0.2.
//!
//! [rust#15934]: https://github.com/rust-lang/rust/pull/15934
//! [rust#18832]: https://github.com/rust-lang/rust/pull/18832#issuecomment-62448221
//! [rust#18858]: https://github.com/rust-lang/rust/pull/18858
//! [rust#24920]: https://github.com/rust-lang/rust/pull/24920
//! [RFC 1040]: https://rust-lang.github.io/rfcs/1040-duration-reform.html
//! [time#136]: https://github.com/time-rs/time/issues/136
//! [chrono#286]: https://github.com/chronotope/chrono/pull/286
//! [chrono#478]: https://github.com/chronotope/chrono/pull/478
//!
//! ## Security advisories
//!
//! In November of 2020 [CVE-2020-26235] and [RUSTSEC-2020-0071] were opened against the `time` crate.
//! @quininer had found that calls to `localtime_r` may be unsound ([chrono#499]). Eventually, almost
//! a year later, this was also made into a security advisory against chrono as [RUSTSEC-2020-0159],
//! which had platform code similar to `time`.
//!
//! On Unix-like systems a process is given a timezone id or description via the `TZ` environment
//! variable. We need this timezone data to calculate the current local time from a value that is
//! in UTC, such as the time from the system clock. `time` 0.1 and chrono used the POSIX function
//! `localtime_r` to do the conversion to local time, which reads the `TZ` variable.
//!
//! Rust assumes the environment to be writable and uses locks to access it from multiple threads.
//! Some other programming languages and libraries use similar locking strategies, but these are
//! typically not shared across languages. More importantly, POSIX declares modifying the
//! environment in a multi-threaded process as unsafe, and `getenv` in libc can't be changed to
//! take a lock because it returns a pointer to the data (see [rust#27970] for more discussion).
//!
//! Since version 0.4.20 chrono no longer uses `localtime_r`, instead using Rust code to query the
//! timezone (from the `TZ` variable or via `iana-time-zone` as a fallback) and work with data
//! from the system timezone database directly. The code for this was forked from the [tz-rs crate]
//! by @x-hgg-x. As such, chrono now respects the Rust lock when reading the `TZ` environment
//! variable. In general, code should avoid modifying the environment.
//!
//! [CVE-2020-26235]: https://nvd.nist.gov/vuln/detail/CVE-2020-26235
//! [RUSTSEC-2020-0071]: https://rustsec.org/advisories/RUSTSEC-2020-0071
//! [chrono#499]: https://github.com/chronotope/chrono/pull/499
//! [RUSTSEC-2020-0159]: https://rustsec.org/advisories/RUSTSEC-2020-0159.html
//! [rust#27970]: https://github.com/rust-lang/rust/issues/27970
//! [chrono#677]: https://github.com/chronotope/chrono/pull/677
//! [tz-rs crate]: https://crates.io/crates/tz-rs
//!
//! ## Removing time 0.1
//!
//! Because time 0.1 has been unmaintained for years, however, the security advisory mentioned
//! above has not been addressed. While chrono maintainers were careful not to break backwards
//! compatibility with the `time::Duration` type, there has been a long stream of issues from
//! users inquiring about the time 0.1 dependency with the vulnerability. We investigated the
//! potential breakage of removing the time 0.1 dependency in [chrono#1095] using a crater-like
//! experiment and determined that the potential for breaking (public) dependencies is very low.
//! We reached out to those few crates that did still depend on compatibility with time 0.1.
//!
//! As such, for chrono 0.4.30 we have decided to swap out the time 0.1 `Duration` implementation
//! for a local one that will offer a strict superset of the existing API going forward. This
//! will prevent most downstream users from being affected by the security vulnerability in time
//! 0.1 while minimizing the ecosystem impact of semver-incompatible version churn.
//!
//! [chrono#1095]: https://github.com/chronotope/chrono/pull/1095
#![doc(html_root_url = "https://docs.rs/chrono/latest/", test(attr(deny(warnings))))]
#![deny(missing_docs)]
#![deny(missing_debug_implementations)]
#![warn(unreachable_pub)]
#![deny(clippy::tests_outside_test_module)]
#![cfg_attr(not(any(feature = "std", test)), no_std)]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#[cfg(feature = "alloc")]
extern crate alloc;
mod time_delta;
#[cfg(feature = "std")]
#[doc(no_inline)]
pub use time_delta::OutOfRangeError;
pub use time_delta::TimeDelta;
/// Alias of [`TimeDelta`].
pub type Duration = TimeDelta;
use core::fmt;
/// A convenience module appropriate for glob imports (`use chrono::prelude::*;`).
pub mod prelude {
#[allow(deprecated)]
pub use crate::Date;
#[cfg(feature = "clock")]
pub use crate::Local;
#[cfg(all(feature = "unstable-locales", feature = "alloc"))]
pub use crate::Locale;
pub use crate::SubsecRound;
pub use crate::{DateTime, SecondsFormat};
pub use crate::{Datelike, Month, Timelike, Weekday};
pub use crate::{FixedOffset, Utc};
pub use crate::{NaiveDate, NaiveDateTime, NaiveTime};
pub use crate::{Offset, TimeZone};
}
mod date;
#[allow(deprecated)]
pub use date::Date;
#[doc(no_inline)]
#[allow(deprecated)]
pub use date::{MAX_DATE, MIN_DATE};
mod datetime;
pub use datetime::DateTime;
#[allow(deprecated)]
#[doc(no_inline)]
pub use datetime::{MAX_DATETIME, MIN_DATETIME};
pub mod format;
/// L10n locales.
#[cfg(feature = "unstable-locales")]
pub use format::Locale;
pub use format::{ParseError, ParseResult, SecondsFormat};
pub mod naive;
#[doc(inline)]
pub use naive::{Days, NaiveDate, NaiveDateTime, NaiveTime};
pub use naive::{IsoWeek, NaiveWeek};
pub mod offset;
#[cfg(feature = "clock")]
#[doc(inline)]
pub use offset::Local;
#[doc(hidden)]
pub use offset::LocalResult;
pub use offset::MappedLocalTime;
#[doc(inline)]
pub use offset::{FixedOffset, Offset, TimeZone, Utc};
pub mod round;
pub use round::{DurationRound, RoundingError, SubsecRound};
mod weekday;
#[doc(no_inline)]
pub use weekday::ParseWeekdayError;
pub use weekday::Weekday;
mod weekday_set;
pub use weekday_set::WeekdaySet;
mod month;
#[doc(no_inline)]
pub use month::ParseMonthError;
pub use month::{Month, Months};
mod traits;
pub use traits::{Datelike, Timelike};
#[cfg(feature = "__internal_bench")]
#[doc(hidden)]
pub use naive::__BenchYearFlags;
/// Serialization/Deserialization with serde
///
/// The [`DateTime`] type has default implementations for (de)serializing to/from the [RFC 3339]
/// format. This module provides alternatives for serializing to timestamps.
///
/// The alternatives are for use with serde's [`with` annotation] combined with the module name.
/// Alternatively the individual `serialize` and `deserialize` functions in each module can be used
/// with serde's [`serialize_with`] and [`deserialize_with`] annotations.
///
/// *Available on crate feature 'serde' only.*
///
/// [RFC 3339]: https://tools.ietf.org/html/rfc3339
/// [`with` annotation]: https://serde.rs/field-attrs.html#with
/// [`serialize_with`]: https://serde.rs/field-attrs.html#serialize_with
/// [`deserialize_with`]: https://serde.rs/field-attrs.html#deserialize_with
#[cfg(feature = "serde")]
pub mod serde {
use core::fmt;
use serde::de;
pub use super::datetime::serde::*;
/// Create a custom `de::Error` with `SerdeError::InvalidTimestamp`.
pub(crate) fn invalid_ts<E, T>(value: T) -> E
where
E: de::Error,
T: fmt::Display,
{
E::custom(SerdeError::InvalidTimestamp(value))
}
enum SerdeError<T: fmt::Display> {
InvalidTimestamp(T),
}
impl<T: fmt::Display> fmt::Display for SerdeError<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
SerdeError::InvalidTimestamp(ts) => {
write!(f, "value is not a legal timestamp: {ts}")
}
}
}
}
}
/// Zero-copy serialization/deserialization with rkyv.
///
/// This module re-exports the `Archived*` versions of chrono's types.
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
pub mod rkyv {
pub use crate::datetime::ArchivedDateTime;
pub use crate::month::ArchivedMonth;
pub use crate::naive::date::ArchivedNaiveDate;
pub use crate::naive::datetime::ArchivedNaiveDateTime;
pub use crate::naive::isoweek::ArchivedIsoWeek;
pub use crate::naive::time::ArchivedNaiveTime;
pub use crate::offset::fixed::ArchivedFixedOffset;
#[cfg(feature = "clock")]
pub use crate::offset::local::ArchivedLocal;
pub use crate::offset::utc::ArchivedUtc;
pub use crate::time_delta::ArchivedTimeDelta;
pub use crate::weekday::ArchivedWeekday;
/// Alias of [`ArchivedTimeDelta`]
pub type ArchivedDuration = ArchivedTimeDelta;
}
/// Out of range error type used in various converting APIs
#[derive(Clone, Copy, Hash, PartialEq, Eq)]
pub struct OutOfRange {
_private: (),
}
impl OutOfRange {
const fn new() -> OutOfRange {
OutOfRange { _private: () }
}
}
impl fmt::Display for OutOfRange {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "out of range")
}
}
impl fmt::Debug for OutOfRange {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "out of range")
}
}
#[cfg(feature = "std")]
impl std::error::Error for OutOfRange {}
/// Workaround because `?` is not (yet) available in const context.
#[macro_export]
#[doc(hidden)]
macro_rules! try_opt {
($e:expr) => {
match $e {
Some(v) => v,
None => return None,
}
};
}
/// Workaround because `.expect()` is not (yet) available in const context.
pub(crate) const fn expect<T: Copy>(opt: Option<T>, msg: &str) -> T {
match opt {
Some(val) => val,
None => panic!("{}", msg),
}
}
#[cfg(test)]
mod tests {
#[cfg(feature = "clock")]
use crate::{DateTime, FixedOffset, Local, NaiveDate, NaiveDateTime, NaiveTime, Utc};
#[test]
#[allow(deprecated)]
#[cfg(feature = "clock")]
fn test_type_sizes() {
use core::mem::size_of;
assert_eq!(size_of::<NaiveDate>(), 4);
assert_eq!(size_of::<Option<NaiveDate>>(), 4);
assert_eq!(size_of::<NaiveTime>(), 8);
assert_eq!(size_of::<Option<NaiveTime>>(), 12);
assert_eq!(size_of::<NaiveDateTime>(), 12);
assert_eq!(size_of::<Option<NaiveDateTime>>(), 12);
assert_eq!(size_of::<DateTime<Utc>>(), 12);
assert_eq!(size_of::<DateTime<FixedOffset>>(), 16);
assert_eq!(size_of::<DateTime<Local>>(), 16);
assert_eq!(size_of::<Option<DateTime<FixedOffset>>>(), 16);
}
}

View File

@@ -0,0 +1,472 @@
use core::fmt;
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
use rkyv::{Archive, Deserialize, Serialize};
use crate::OutOfRange;
use crate::naive::NaiveDate;
/// The month of the year.
///
/// This enum is just a convenience implementation.
/// The month in dates created by `Datelike` objects is not returned as this enum.
///
/// It is possible to convert from a date to a month independently
/// ```
/// use chrono::prelude::*;
/// let date = Utc.with_ymd_and_hms(2019, 10, 28, 9, 10, 11).unwrap();
/// // `2019-10-28T09:10:11Z`
/// let month = Month::try_from(u8::try_from(date.month()).unwrap()).ok();
/// assert_eq!(month, Some(Month::October))
/// ```
/// Or from a Month to an integer usable by dates
/// ```
/// # use chrono::prelude::*;
/// let month = Month::January;
/// let dt = Utc.with_ymd_and_hms(2019, month.number_from_month(), 28, 9, 10, 11).unwrap();
/// assert_eq!((dt.year(), dt.month(), dt.day()), (2019, 1, 28));
/// ```
/// Allows mapping from and to a month number, from 1 (January) to 12 (December).
/// Can be serialized/deserialized with serde.
// Actual implementation is zero-indexed, API intended as 1-indexed for more intuitive behavior.
#[derive(PartialEq, Eq, Copy, Clone, Debug, Hash, PartialOrd, Ord)]
#[cfg_attr(
any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
pub enum Month {
/// January
January = 0,
/// February
February = 1,
/// March
March = 2,
/// April
April = 3,
/// May
May = 4,
/// June
June = 5,
/// July
July = 6,
/// August
August = 7,
/// September
September = 8,
/// October
October = 9,
/// November
November = 10,
/// December
December = 11,
}
impl Month {
/// The next month.
///
/// `m`: | `January` | `February` | `...` | `December`
/// ----------- | --------- | ---------- | --- | ---------
/// `m.succ()`: | `February` | `March` | `...` | `January`
#[inline]
#[must_use]
pub const fn succ(&self) -> Month {
match *self {
Month::January => Month::February,
Month::February => Month::March,
Month::March => Month::April,
Month::April => Month::May,
Month::May => Month::June,
Month::June => Month::July,
Month::July => Month::August,
Month::August => Month::September,
Month::September => Month::October,
Month::October => Month::November,
Month::November => Month::December,
Month::December => Month::January,
}
}
/// The previous month.
///
/// `m`: | `January` | `February` | `...` | `December`
/// ----------- | --------- | ---------- | --- | ---------
/// `m.pred()`: | `December` | `January` | `...` | `November`
#[inline]
#[must_use]
pub const fn pred(&self) -> Month {
match *self {
Month::January => Month::December,
Month::February => Month::January,
Month::March => Month::February,
Month::April => Month::March,
Month::May => Month::April,
Month::June => Month::May,
Month::July => Month::June,
Month::August => Month::July,
Month::September => Month::August,
Month::October => Month::September,
Month::November => Month::October,
Month::December => Month::November,
}
}
/// Returns a month-of-year number starting from January = 1.
///
/// `m`: | `January` | `February` | `...` | `December`
/// -------------------------| --------- | ---------- | --- | -----
/// `m.number_from_month()`: | 1 | 2 | `...` | 12
#[inline]
#[must_use]
pub const fn number_from_month(&self) -> u32 {
match *self {
Month::January => 1,
Month::February => 2,
Month::March => 3,
Month::April => 4,
Month::May => 5,
Month::June => 6,
Month::July => 7,
Month::August => 8,
Month::September => 9,
Month::October => 10,
Month::November => 11,
Month::December => 12,
}
}
/// Get the name of the month
///
/// ```
/// use chrono::Month;
///
/// assert_eq!(Month::January.name(), "January")
/// ```
#[must_use]
pub const fn name(&self) -> &'static str {
match *self {
Month::January => "January",
Month::February => "February",
Month::March => "March",
Month::April => "April",
Month::May => "May",
Month::June => "June",
Month::July => "July",
Month::August => "August",
Month::September => "September",
Month::October => "October",
Month::November => "November",
Month::December => "December",
}
}
/// Get the length in days of the month
///
/// Yields `None` if `year` is out of range for `NaiveDate`.
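///
/// A small illustrative example (the leap-year handling comes from the `NaiveDate` lookup
/// mentioned above):
///
/// ```
/// use chrono::Month;
///
/// assert_eq!(Month::February.num_days(2024), Some(29));
/// assert_eq!(Month::April.num_days(2023), Some(30));
/// ```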
pub fn num_days(&self, year: i32) -> Option<u8> {
Some(match *self {
Month::January => 31,
Month::February => match NaiveDate::from_ymd_opt(year, 2, 1)?.leap_year() {
true => 29,
false => 28,
},
Month::March => 31,
Month::April => 30,
Month::May => 31,
Month::June => 30,
Month::July => 31,
Month::August => 31,
Month::September => 30,
Month::October => 31,
Month::November => 30,
Month::December => 31,
})
}
}
impl TryFrom<u8> for Month {
type Error = OutOfRange;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
1 => Ok(Month::January),
2 => Ok(Month::February),
3 => Ok(Month::March),
4 => Ok(Month::April),
5 => Ok(Month::May),
6 => Ok(Month::June),
7 => Ok(Month::July),
8 => Ok(Month::August),
9 => Ok(Month::September),
10 => Ok(Month::October),
11 => Ok(Month::November),
12 => Ok(Month::December),
_ => Err(OutOfRange::new()),
}
}
}
impl num_traits::FromPrimitive for Month {
/// Returns an `Option<Month>` from an i64, assuming 1-based indexing with January = 1.
///
/// `Month::from_i64(n: i64)`: | `1` | `2` | ... | `12`
/// ---------------------------| -------------------- | --------------------- | ... | -----
/// ``: | Some(Month::January) | Some(Month::February) | ... | Some(Month::December)
#[inline]
fn from_u64(n: u64) -> Option<Month> {
Self::from_u32(n as u32)
}
#[inline]
fn from_i64(n: i64) -> Option<Month> {
Self::from_u32(n as u32)
}
#[inline]
fn from_u32(n: u32) -> Option<Month> {
match n {
1 => Some(Month::January),
2 => Some(Month::February),
3 => Some(Month::March),
4 => Some(Month::April),
5 => Some(Month::May),
6 => Some(Month::June),
7 => Some(Month::July),
8 => Some(Month::August),
9 => Some(Month::September),
10 => Some(Month::October),
11 => Some(Month::November),
12 => Some(Month::December),
_ => None,
}
}
}
/// A duration in calendar months
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
pub struct Months(pub(crate) u32);
impl Months {
/// Construct a new `Months` from a number of months
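///
/// A short illustrative example:
///
/// ```
/// use chrono::Months;
///
/// assert_eq!(Months::new(3).as_u32(), 3);
/// ```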
pub const fn new(num: u32) -> Self {
Self(num)
}
/// Returns the total number of months in the `Months` instance.
#[inline]
pub const fn as_u32(&self) -> u32 {
self.0
}
}
/// An error resulting from reading a `Month` value with `FromStr`.
#[derive(Clone, PartialEq, Eq)]
pub struct ParseMonthError {
pub(crate) _dummy: (),
}
#[cfg(feature = "std")]
impl std::error::Error for ParseMonthError {}
impl fmt::Display for ParseMonthError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ParseMonthError {{ .. }}")
}
}
impl fmt::Debug for ParseMonthError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ParseMonthError {{ .. }}")
}
}
#[cfg(feature = "serde")]
mod month_serde {
use super::Month;
use serde::{de, ser};
use core::fmt;
impl ser::Serialize for Month {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
serializer.collect_str(self.name())
}
}
struct MonthVisitor;
impl de::Visitor<'_> for MonthVisitor {
type Value = Month;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("Month")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
value.parse().map_err(|_| E::custom("short (3-letter) or full month names expected"))
}
}
impl<'de> de::Deserialize<'de> for Month {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
deserializer.deserialize_str(MonthVisitor)
}
}
}
#[cfg(test)]
mod tests {
use super::Month;
use crate::{Datelike, Months, OutOfRange, TimeZone, Utc};
#[test]
fn test_month_enum_try_from() {
assert_eq!(Month::try_from(1), Ok(Month::January));
assert_eq!(Month::try_from(2), Ok(Month::February));
assert_eq!(Month::try_from(12), Ok(Month::December));
assert_eq!(Month::try_from(13), Err(OutOfRange::new()));
let date = Utc.with_ymd_and_hms(2019, 10, 28, 9, 10, 11).unwrap();
assert_eq!(Month::try_from(date.month() as u8), Ok(Month::October));
let month = Month::January;
let dt = Utc.with_ymd_and_hms(2019, month.number_from_month(), 28, 9, 10, 11).unwrap();
assert_eq!((dt.year(), dt.month(), dt.day()), (2019, 1, 28));
}
#[test]
fn test_month_enum_primitive_parse() {
use num_traits::FromPrimitive;
let jan_opt = Month::from_u32(1);
let feb_opt = Month::from_u64(2);
let dec_opt = Month::from_i64(12);
let no_month = Month::from_u32(13);
assert_eq!(jan_opt, Some(Month::January));
assert_eq!(feb_opt, Some(Month::February));
assert_eq!(dec_opt, Some(Month::December));
assert_eq!(no_month, None);
let date = Utc.with_ymd_and_hms(2019, 10, 28, 9, 10, 11).unwrap();
assert_eq!(Month::from_u32(date.month()), Some(Month::October));
let month = Month::January;
let dt = Utc.with_ymd_and_hms(2019, month.number_from_month(), 28, 9, 10, 11).unwrap();
assert_eq!((dt.year(), dt.month(), dt.day()), (2019, 1, 28));
}
#[test]
fn test_month_enum_succ_pred() {
assert_eq!(Month::January.succ(), Month::February);
assert_eq!(Month::December.succ(), Month::January);
assert_eq!(Month::January.pred(), Month::December);
assert_eq!(Month::February.pred(), Month::January);
}
#[test]
fn test_month_partial_ord() {
assert!(Month::January <= Month::January);
assert!(Month::January < Month::February);
assert!(Month::January < Month::December);
assert!(Month::July >= Month::May);
assert!(Month::September > Month::March);
}
#[test]
fn test_months_as_u32() {
assert_eq!(Months::new(0).as_u32(), 0);
assert_eq!(Months::new(1).as_u32(), 1);
assert_eq!(Months::new(u32::MAX).as_u32(), u32::MAX);
}
#[test]
#[cfg(feature = "serde")]
fn test_serde_serialize() {
use Month::*;
use serde_json::to_string;
let cases: Vec<(Month, &str)> = vec![
(January, "\"January\""),
(February, "\"February\""),
(March, "\"March\""),
(April, "\"April\""),
(May, "\"May\""),
(June, "\"June\""),
(July, "\"July\""),
(August, "\"August\""),
(September, "\"September\""),
(October, "\"October\""),
(November, "\"November\""),
(December, "\"December\""),
];
for (month, expected_str) in cases {
let string = to_string(&month).unwrap();
assert_eq!(string, expected_str);
}
}
#[test]
#[cfg(feature = "serde")]
fn test_serde_deserialize() {
use Month::*;
use serde_json::from_str;
let cases: Vec<(&str, Month)> = vec![
("\"january\"", January),
("\"jan\"", January),
("\"FeB\"", February),
("\"MAR\"", March),
("\"mar\"", March),
("\"april\"", April),
("\"may\"", May),
("\"june\"", June),
("\"JULY\"", July),
("\"august\"", August),
("\"september\"", September),
("\"October\"", October),
("\"November\"", November),
("\"DECEmbEr\"", December),
];
for (string, expected_month) in cases {
let month = from_str::<Month>(string).unwrap();
assert_eq!(month, expected_month);
}
let errors: Vec<&str> =
vec!["\"not a month\"", "\"ja\"", "\"Dece\"", "Dec", "\"Augustin\""];
for string in errors {
from_str::<Month>(string).unwrap_err();
}
}
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let month = Month::January;
let bytes = rkyv::to_bytes::<_, 1>(&month).unwrap();
assert_eq!(rkyv::from_bytes::<Month>(&bytes).unwrap(), month);
}
#[test]
fn num_days() {
assert_eq!(Month::January.num_days(2020), Some(31));
assert_eq!(Month::February.num_days(2020), Some(29));
assert_eq!(Month::February.num_days(2019), Some(28));
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,875 @@
use super::{Days, MAX_YEAR, MIN_YEAR, Months, NaiveDate};
use crate::naive::internals::{A, AG, B, BA, C, CB, D, DC, E, ED, F, FE, G, GF, YearFlags};
use crate::{Datelike, TimeDelta, Weekday};
// as it is hard to verify year flags in `NaiveDate::MIN` and `NaiveDate::MAX`,
// we use a separate run-time test.
#[test]
fn test_date_bounds() {
let calculated_min = NaiveDate::from_ymd_opt(MIN_YEAR, 1, 1).unwrap();
let calculated_max = NaiveDate::from_ymd_opt(MAX_YEAR, 12, 31).unwrap();
assert!(
NaiveDate::MIN == calculated_min,
"`NaiveDate::MIN` should have year flag {:?}",
calculated_min.year_flags()
);
assert!(
NaiveDate::MAX == calculated_max,
"`NaiveDate::MAX` should have year flag {:?} and ordinal {}",
calculated_max.year_flags(),
calculated_max.ordinal()
);
// let's also check that the entire range does not exceed 2^44 seconds
// (sometimes used for bounding `TimeDelta` against overflow)
let maxsecs = NaiveDate::MAX.signed_duration_since(NaiveDate::MIN).num_seconds();
let maxsecs = maxsecs + 86401; // also take care of DateTime
assert!(
maxsecs < (1 << MAX_BITS),
"The entire `NaiveDate` range somehow exceeds 2^{MAX_BITS} seconds"
);
const BEFORE_MIN: NaiveDate = NaiveDate::BEFORE_MIN;
assert_eq!(BEFORE_MIN.year_flags(), YearFlags::from_year(BEFORE_MIN.year()));
assert_eq!((BEFORE_MIN.month(), BEFORE_MIN.day()), (12, 31));
const AFTER_MAX: NaiveDate = NaiveDate::AFTER_MAX;
assert_eq!(AFTER_MAX.year_flags(), YearFlags::from_year(AFTER_MAX.year()));
assert_eq!((AFTER_MAX.month(), AFTER_MAX.day()), (1, 1));
}
#[test]
fn diff_months() {
// identity
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3).unwrap().checked_add_months(Months::new(0)),
Some(NaiveDate::from_ymd_opt(2022, 8, 3).unwrap())
);
// add with months exceeding `i32::MAX`
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3)
.unwrap()
.checked_add_months(Months::new(i32::MAX as u32 + 1)),
None
);
// sub with months exceeding `i32::MIN`
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3)
.unwrap()
.checked_sub_months(Months::new(i32::MIN.unsigned_abs() + 1)),
None
);
// add overflowing year
assert_eq!(NaiveDate::MAX.checked_add_months(Months::new(1)), None);
// add underflowing year
assert_eq!(NaiveDate::MIN.checked_sub_months(Months::new(1)), None);
// sub crossing year 0 boundary
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3).unwrap().checked_sub_months(Months::new(2050 * 12)),
Some(NaiveDate::from_ymd_opt(-28, 8, 3).unwrap())
);
// add crossing year boundary
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3).unwrap().checked_add_months(Months::new(6)),
Some(NaiveDate::from_ymd_opt(2023, 2, 3).unwrap())
);
// sub crossing year boundary
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3).unwrap().checked_sub_months(Months::new(10)),
Some(NaiveDate::from_ymd_opt(2021, 10, 3).unwrap())
);
// add clamping day, non-leap year
assert_eq!(
NaiveDate::from_ymd_opt(2022, 1, 29).unwrap().checked_add_months(Months::new(1)),
Some(NaiveDate::from_ymd_opt(2022, 2, 28).unwrap())
);
// add to leap day
assert_eq!(
NaiveDate::from_ymd_opt(2022, 10, 29).unwrap().checked_add_months(Months::new(16)),
Some(NaiveDate::from_ymd_opt(2024, 2, 29).unwrap())
);
// add into december
assert_eq!(
NaiveDate::from_ymd_opt(2022, 10, 31).unwrap().checked_add_months(Months::new(2)),
Some(NaiveDate::from_ymd_opt(2022, 12, 31).unwrap())
);
// sub into december
assert_eq!(
NaiveDate::from_ymd_opt(2022, 10, 31).unwrap().checked_sub_months(Months::new(10)),
Some(NaiveDate::from_ymd_opt(2021, 12, 31).unwrap())
);
// add into january
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3).unwrap().checked_add_months(Months::new(5)),
Some(NaiveDate::from_ymd_opt(2023, 1, 3).unwrap())
);
// sub into january
assert_eq!(
NaiveDate::from_ymd_opt(2022, 8, 3).unwrap().checked_sub_months(Months::new(7)),
Some(NaiveDate::from_ymd_opt(2022, 1, 3).unwrap())
);
}
#[test]
fn test_readme_doomsday() {
for y in NaiveDate::MIN.year()..=NaiveDate::MAX.year() {
// even months
let d4 = NaiveDate::from_ymd_opt(y, 4, 4).unwrap();
let d6 = NaiveDate::from_ymd_opt(y, 6, 6).unwrap();
let d8 = NaiveDate::from_ymd_opt(y, 8, 8).unwrap();
let d10 = NaiveDate::from_ymd_opt(y, 10, 10).unwrap();
let d12 = NaiveDate::from_ymd_opt(y, 12, 12).unwrap();
// nine to five, seven-eleven
let d59 = NaiveDate::from_ymd_opt(y, 5, 9).unwrap();
let d95 = NaiveDate::from_ymd_opt(y, 9, 5).unwrap();
let d711 = NaiveDate::from_ymd_opt(y, 7, 11).unwrap();
let d117 = NaiveDate::from_ymd_opt(y, 11, 7).unwrap();
// "March 0"
let d30 = NaiveDate::from_ymd_opt(y, 3, 1).unwrap().pred_opt().unwrap();
let weekday = d30.weekday();
let other_dates = [d4, d6, d8, d10, d12, d59, d95, d711, d117];
assert!(other_dates.iter().all(|d| d.weekday() == weekday));
}
}
#[test]
fn test_date_from_ymd() {
let from_ymd = NaiveDate::from_ymd_opt;
assert!(from_ymd(2012, 0, 1).is_none());
assert!(from_ymd(2012, 1, 1).is_some());
assert!(from_ymd(2012, 2, 29).is_some());
assert!(from_ymd(2014, 2, 29).is_none());
assert!(from_ymd(2014, 3, 0).is_none());
assert!(from_ymd(2014, 3, 1).is_some());
assert!(from_ymd(2014, 3, 31).is_some());
assert!(from_ymd(2014, 3, 32).is_none());
assert!(from_ymd(2014, 12, 31).is_some());
assert!(from_ymd(2014, 13, 1).is_none());
}
#[test]
fn test_date_from_yo() {
let from_yo = NaiveDate::from_yo_opt;
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
assert_eq!(from_yo(2012, 0), None);
assert_eq!(from_yo(2012, 1), Some(ymd(2012, 1, 1)));
assert_eq!(from_yo(2012, 2), Some(ymd(2012, 1, 2)));
assert_eq!(from_yo(2012, 32), Some(ymd(2012, 2, 1)));
assert_eq!(from_yo(2012, 60), Some(ymd(2012, 2, 29)));
assert_eq!(from_yo(2012, 61), Some(ymd(2012, 3, 1)));
assert_eq!(from_yo(2012, 100), Some(ymd(2012, 4, 9)));
assert_eq!(from_yo(2012, 200), Some(ymd(2012, 7, 18)));
assert_eq!(from_yo(2012, 300), Some(ymd(2012, 10, 26)));
assert_eq!(from_yo(2012, 366), Some(ymd(2012, 12, 31)));
assert_eq!(from_yo(2012, 367), None);
assert_eq!(from_yo(2012, (1 << 28) | 60), None);
assert_eq!(from_yo(2014, 0), None);
assert_eq!(from_yo(2014, 1), Some(ymd(2014, 1, 1)));
assert_eq!(from_yo(2014, 2), Some(ymd(2014, 1, 2)));
assert_eq!(from_yo(2014, 32), Some(ymd(2014, 2, 1)));
assert_eq!(from_yo(2014, 59), Some(ymd(2014, 2, 28)));
assert_eq!(from_yo(2014, 60), Some(ymd(2014, 3, 1)));
assert_eq!(from_yo(2014, 100), Some(ymd(2014, 4, 10)));
assert_eq!(from_yo(2014, 200), Some(ymd(2014, 7, 19)));
assert_eq!(from_yo(2014, 300), Some(ymd(2014, 10, 27)));
assert_eq!(from_yo(2014, 365), Some(ymd(2014, 12, 31)));
assert_eq!(from_yo(2014, 366), None);
}
#[test]
fn test_date_from_isoywd() {
let from_isoywd = NaiveDate::from_isoywd_opt;
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
assert_eq!(from_isoywd(2004, 0, Weekday::Sun), None);
assert_eq!(from_isoywd(2004, 1, Weekday::Mon), Some(ymd(2003, 12, 29)));
assert_eq!(from_isoywd(2004, 1, Weekday::Sun), Some(ymd(2004, 1, 4)));
assert_eq!(from_isoywd(2004, 2, Weekday::Mon), Some(ymd(2004, 1, 5)));
assert_eq!(from_isoywd(2004, 2, Weekday::Sun), Some(ymd(2004, 1, 11)));
assert_eq!(from_isoywd(2004, 52, Weekday::Mon), Some(ymd(2004, 12, 20)));
assert_eq!(from_isoywd(2004, 52, Weekday::Sun), Some(ymd(2004, 12, 26)));
assert_eq!(from_isoywd(2004, 53, Weekday::Mon), Some(ymd(2004, 12, 27)));
assert_eq!(from_isoywd(2004, 53, Weekday::Sun), Some(ymd(2005, 1, 2)));
assert_eq!(from_isoywd(2004, 54, Weekday::Mon), None);
assert_eq!(from_isoywd(2011, 0, Weekday::Sun), None);
assert_eq!(from_isoywd(2011, 1, Weekday::Mon), Some(ymd(2011, 1, 3)));
assert_eq!(from_isoywd(2011, 1, Weekday::Sun), Some(ymd(2011, 1, 9)));
assert_eq!(from_isoywd(2011, 2, Weekday::Mon), Some(ymd(2011, 1, 10)));
assert_eq!(from_isoywd(2011, 2, Weekday::Sun), Some(ymd(2011, 1, 16)));
assert_eq!(from_isoywd(2018, 51, Weekday::Mon), Some(ymd(2018, 12, 17)));
assert_eq!(from_isoywd(2018, 51, Weekday::Sun), Some(ymd(2018, 12, 23)));
assert_eq!(from_isoywd(2018, 52, Weekday::Mon), Some(ymd(2018, 12, 24)));
assert_eq!(from_isoywd(2018, 52, Weekday::Sun), Some(ymd(2018, 12, 30)));
assert_eq!(from_isoywd(2018, 53, Weekday::Mon), None);
}
#[test]
fn test_date_from_isoywd_and_iso_week() {
for year in 2000..2401 {
for week in 1..54 {
for &weekday in [
Weekday::Mon,
Weekday::Tue,
Weekday::Wed,
Weekday::Thu,
Weekday::Fri,
Weekday::Sat,
Weekday::Sun,
]
.iter()
{
let d = NaiveDate::from_isoywd_opt(year, week, weekday);
if let Some(d) = d {
assert_eq!(d.weekday(), weekday);
let w = d.iso_week();
assert_eq!(w.year(), year);
assert_eq!(w.week(), week);
}
}
}
}
for year in 2000..2401 {
for month in 1..13 {
for day in 1..32 {
let d = NaiveDate::from_ymd_opt(year, month, day);
if let Some(d) = d {
let w = d.iso_week();
let d_ = NaiveDate::from_isoywd_opt(w.year(), w.week(), d.weekday());
assert_eq!(d, d_.unwrap());
}
}
}
}
}
#[test]
fn test_date_from_num_days_from_ce() {
let from_ndays_from_ce = NaiveDate::from_num_days_from_ce_opt;
assert_eq!(from_ndays_from_ce(1), Some(NaiveDate::from_ymd_opt(1, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(2), Some(NaiveDate::from_ymd_opt(1, 1, 2).unwrap()));
assert_eq!(from_ndays_from_ce(31), Some(NaiveDate::from_ymd_opt(1, 1, 31).unwrap()));
assert_eq!(from_ndays_from_ce(32), Some(NaiveDate::from_ymd_opt(1, 2, 1).unwrap()));
assert_eq!(from_ndays_from_ce(59), Some(NaiveDate::from_ymd_opt(1, 2, 28).unwrap()));
assert_eq!(from_ndays_from_ce(60), Some(NaiveDate::from_ymd_opt(1, 3, 1).unwrap()));
assert_eq!(from_ndays_from_ce(365), Some(NaiveDate::from_ymd_opt(1, 12, 31).unwrap()));
assert_eq!(from_ndays_from_ce(365 + 1), Some(NaiveDate::from_ymd_opt(2, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(365 * 2 + 1), Some(NaiveDate::from_ymd_opt(3, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(365 * 3 + 1), Some(NaiveDate::from_ymd_opt(4, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(365 * 4 + 2), Some(NaiveDate::from_ymd_opt(5, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(146097 + 1), Some(NaiveDate::from_ymd_opt(401, 1, 1).unwrap()));
assert_eq!(
from_ndays_from_ce(146097 * 5 + 1),
Some(NaiveDate::from_ymd_opt(2001, 1, 1).unwrap())
);
assert_eq!(from_ndays_from_ce(719163), Some(NaiveDate::from_ymd_opt(1970, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(0), Some(NaiveDate::from_ymd_opt(0, 12, 31).unwrap())); // 1 BCE
assert_eq!(from_ndays_from_ce(-365), Some(NaiveDate::from_ymd_opt(0, 1, 1).unwrap()));
assert_eq!(from_ndays_from_ce(-366), Some(NaiveDate::from_ymd_opt(-1, 12, 31).unwrap())); // 2 BCE
for days in (-9999..10001).map(|x| x * 100) {
assert_eq!(from_ndays_from_ce(days).map(|d| d.num_days_from_ce()), Some(days));
}
assert_eq!(from_ndays_from_ce(NaiveDate::MIN.num_days_from_ce()), Some(NaiveDate::MIN));
assert_eq!(from_ndays_from_ce(NaiveDate::MIN.num_days_from_ce() - 1), None);
assert_eq!(from_ndays_from_ce(NaiveDate::MAX.num_days_from_ce()), Some(NaiveDate::MAX));
assert_eq!(from_ndays_from_ce(NaiveDate::MAX.num_days_from_ce() + 1), None);
assert_eq!(from_ndays_from_ce(i32::MIN), None);
assert_eq!(from_ndays_from_ce(i32::MAX), None);
}
#[test]
fn test_date_from_weekday_of_month_opt() {
let ymwd = NaiveDate::from_weekday_of_month_opt;
assert_eq!(ymwd(2018, 8, Weekday::Tue, 0), None);
assert_eq!(ymwd(2018, 8, Weekday::Wed, 1), Some(NaiveDate::from_ymd_opt(2018, 8, 1).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Thu, 1), Some(NaiveDate::from_ymd_opt(2018, 8, 2).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Sun, 1), Some(NaiveDate::from_ymd_opt(2018, 8, 5).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Mon, 1), Some(NaiveDate::from_ymd_opt(2018, 8, 6).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Tue, 1), Some(NaiveDate::from_ymd_opt(2018, 8, 7).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Wed, 2), Some(NaiveDate::from_ymd_opt(2018, 8, 8).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Sun, 2), Some(NaiveDate::from_ymd_opt(2018, 8, 12).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Thu, 3), Some(NaiveDate::from_ymd_opt(2018, 8, 16).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Thu, 4), Some(NaiveDate::from_ymd_opt(2018, 8, 23).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Thu, 5), Some(NaiveDate::from_ymd_opt(2018, 8, 30).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Fri, 5), Some(NaiveDate::from_ymd_opt(2018, 8, 31).unwrap()));
assert_eq!(ymwd(2018, 8, Weekday::Sat, 5), None);
}
#[test]
fn test_date_fields() {
fn check(year: i32, month: u32, day: u32, ordinal: u32) {
let d1 = NaiveDate::from_ymd_opt(year, month, day).unwrap();
assert_eq!(d1.year(), year);
assert_eq!(d1.month(), month);
assert_eq!(d1.day(), day);
assert_eq!(d1.ordinal(), ordinal);
let d2 = NaiveDate::from_yo_opt(year, ordinal).unwrap();
assert_eq!(d2.year(), year);
assert_eq!(d2.month(), month);
assert_eq!(d2.day(), day);
assert_eq!(d2.ordinal(), ordinal);
assert_eq!(d1, d2);
}
check(2012, 1, 1, 1);
check(2012, 1, 2, 2);
check(2012, 2, 1, 32);
check(2012, 2, 29, 60);
check(2012, 3, 1, 61);
check(2012, 4, 9, 100);
check(2012, 7, 18, 200);
check(2012, 10, 26, 300);
check(2012, 12, 31, 366);
check(2014, 1, 1, 1);
check(2014, 1, 2, 2);
check(2014, 2, 1, 32);
check(2014, 2, 28, 59);
check(2014, 3, 1, 60);
check(2014, 4, 10, 100);
check(2014, 7, 19, 200);
check(2014, 10, 27, 300);
check(2014, 12, 31, 365);
}
#[test]
fn test_date_weekday() {
assert_eq!(NaiveDate::from_ymd_opt(1582, 10, 15).unwrap().weekday(), Weekday::Fri);
// May 20, 1875 = ISO 8601 reference date
assert_eq!(NaiveDate::from_ymd_opt(1875, 5, 20).unwrap().weekday(), Weekday::Thu);
assert_eq!(NaiveDate::from_ymd_opt(2000, 1, 1).unwrap().weekday(), Weekday::Sat);
}
#[test]
fn test_date_with_fields() {
let d = NaiveDate::from_ymd_opt(2000, 2, 29).unwrap();
assert_eq!(d.with_year(-400), Some(NaiveDate::from_ymd_opt(-400, 2, 29).unwrap()));
assert_eq!(d.with_year(-100), None);
assert_eq!(d.with_year(1600), Some(NaiveDate::from_ymd_opt(1600, 2, 29).unwrap()));
assert_eq!(d.with_year(1900), None);
assert_eq!(d.with_year(2000), Some(NaiveDate::from_ymd_opt(2000, 2, 29).unwrap()));
assert_eq!(d.with_year(2001), None);
assert_eq!(d.with_year(2004), Some(NaiveDate::from_ymd_opt(2004, 2, 29).unwrap()));
assert_eq!(d.with_year(i32::MAX), None);
let d = NaiveDate::from_ymd_opt(2000, 4, 30).unwrap();
assert_eq!(d.with_month(0), None);
assert_eq!(d.with_month(1), Some(NaiveDate::from_ymd_opt(2000, 1, 30).unwrap()));
assert_eq!(d.with_month(2), None);
assert_eq!(d.with_month(3), Some(NaiveDate::from_ymd_opt(2000, 3, 30).unwrap()));
assert_eq!(d.with_month(4), Some(NaiveDate::from_ymd_opt(2000, 4, 30).unwrap()));
assert_eq!(d.with_month(12), Some(NaiveDate::from_ymd_opt(2000, 12, 30).unwrap()));
assert_eq!(d.with_month(13), None);
assert_eq!(d.with_month(u32::MAX), None);
let d = NaiveDate::from_ymd_opt(2000, 2, 8).unwrap();
assert_eq!(d.with_day(0), None);
assert_eq!(d.with_day(1), Some(NaiveDate::from_ymd_opt(2000, 2, 1).unwrap()));
assert_eq!(d.with_day(29), Some(NaiveDate::from_ymd_opt(2000, 2, 29).unwrap()));
assert_eq!(d.with_day(30), None);
assert_eq!(d.with_day(u32::MAX), None);
}
#[test]
fn test_date_with_ordinal() {
let d = NaiveDate::from_ymd_opt(2000, 5, 5).unwrap();
assert_eq!(d.with_ordinal(0), None);
assert_eq!(d.with_ordinal(1), Some(NaiveDate::from_ymd_opt(2000, 1, 1).unwrap()));
assert_eq!(d.with_ordinal(60), Some(NaiveDate::from_ymd_opt(2000, 2, 29).unwrap()));
assert_eq!(d.with_ordinal(61), Some(NaiveDate::from_ymd_opt(2000, 3, 1).unwrap()));
assert_eq!(d.with_ordinal(366), Some(NaiveDate::from_ymd_opt(2000, 12, 31).unwrap()));
assert_eq!(d.with_ordinal(367), None);
assert_eq!(d.with_ordinal((1 << 28) | 60), None);
let d = NaiveDate::from_ymd_opt(1999, 5, 5).unwrap();
assert_eq!(d.with_ordinal(366), None);
assert_eq!(d.with_ordinal(u32::MAX), None);
}
#[test]
fn test_date_num_days_from_ce() {
assert_eq!(NaiveDate::from_ymd_opt(1, 1, 1).unwrap().num_days_from_ce(), 1);
for year in -9999..10001 {
assert_eq!(
NaiveDate::from_ymd_opt(year, 1, 1).unwrap().num_days_from_ce(),
NaiveDate::from_ymd_opt(year - 1, 12, 31).unwrap().num_days_from_ce() + 1
);
}
}
#[test]
fn test_date_succ() {
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
assert_eq!(ymd(2014, 5, 6).succ_opt(), Some(ymd(2014, 5, 7)));
assert_eq!(ymd(2014, 5, 31).succ_opt(), Some(ymd(2014, 6, 1)));
assert_eq!(ymd(2014, 12, 31).succ_opt(), Some(ymd(2015, 1, 1)));
assert_eq!(ymd(2016, 2, 28).succ_opt(), Some(ymd(2016, 2, 29)));
assert_eq!(ymd(NaiveDate::MAX.year(), 12, 31).succ_opt(), None);
}
#[test]
fn test_date_pred() {
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
assert_eq!(ymd(2016, 3, 1).pred_opt(), Some(ymd(2016, 2, 29)));
assert_eq!(ymd(2015, 1, 1).pred_opt(), Some(ymd(2014, 12, 31)));
assert_eq!(ymd(2014, 6, 1).pred_opt(), Some(ymd(2014, 5, 31)));
assert_eq!(ymd(2014, 5, 7).pred_opt(), Some(ymd(2014, 5, 6)));
assert_eq!(ymd(NaiveDate::MIN.year(), 1, 1).pred_opt(), None);
}
#[test]
fn test_date_checked_add_signed() {
fn check(lhs: Option<NaiveDate>, delta: TimeDelta, rhs: Option<NaiveDate>) {
assert_eq!(lhs.unwrap().checked_add_signed(delta), rhs);
assert_eq!(lhs.unwrap().checked_sub_signed(-delta), rhs);
}
let ymd = NaiveDate::from_ymd_opt;
check(ymd(2014, 1, 1), TimeDelta::zero(), ymd(2014, 1, 1));
check(ymd(2014, 1, 1), TimeDelta::try_seconds(86399).unwrap(), ymd(2014, 1, 1));
// always round towards zero
check(ymd(2014, 1, 1), TimeDelta::try_seconds(-86399).unwrap(), ymd(2014, 1, 1));
check(ymd(2014, 1, 1), TimeDelta::try_days(1).unwrap(), ymd(2014, 1, 2));
check(ymd(2014, 1, 1), TimeDelta::try_days(-1).unwrap(), ymd(2013, 12, 31));
check(ymd(2014, 1, 1), TimeDelta::try_days(364).unwrap(), ymd(2014, 12, 31));
check(ymd(2014, 1, 1), TimeDelta::try_days(365 * 4 + 1).unwrap(), ymd(2018, 1, 1));
check(ymd(2014, 1, 1), TimeDelta::try_days(365 * 400 + 97).unwrap(), ymd(2414, 1, 1));
check(ymd(-7, 1, 1), TimeDelta::try_days(365 * 12 + 3).unwrap(), ymd(5, 1, 1));
// overflow check
check(
ymd(0, 1, 1),
TimeDelta::try_days(MAX_DAYS_FROM_YEAR_0 as i64).unwrap(),
ymd(MAX_YEAR, 12, 31),
);
check(ymd(0, 1, 1), TimeDelta::try_days(MAX_DAYS_FROM_YEAR_0 as i64 + 1).unwrap(), None);
check(ymd(0, 1, 1), TimeDelta::MAX, None);
check(
ymd(0, 1, 1),
TimeDelta::try_days(MIN_DAYS_FROM_YEAR_0 as i64).unwrap(),
ymd(MIN_YEAR, 1, 1),
);
check(ymd(0, 1, 1), TimeDelta::try_days(MIN_DAYS_FROM_YEAR_0 as i64 - 1).unwrap(), None);
check(ymd(0, 1, 1), TimeDelta::MIN, None);
}
#[test]
fn test_date_signed_duration_since() {
fn check(lhs: Option<NaiveDate>, rhs: Option<NaiveDate>, delta: TimeDelta) {
assert_eq!(lhs.unwrap().signed_duration_since(rhs.unwrap()), delta);
assert_eq!(rhs.unwrap().signed_duration_since(lhs.unwrap()), -delta);
}
let ymd = NaiveDate::from_ymd_opt;
check(ymd(2014, 1, 1), ymd(2014, 1, 1), TimeDelta::zero());
check(ymd(2014, 1, 2), ymd(2014, 1, 1), TimeDelta::try_days(1).unwrap());
check(ymd(2014, 12, 31), ymd(2014, 1, 1), TimeDelta::try_days(364).unwrap());
check(ymd(2015, 1, 3), ymd(2014, 1, 1), TimeDelta::try_days(365 + 2).unwrap());
check(ymd(2018, 1, 1), ymd(2014, 1, 1), TimeDelta::try_days(365 * 4 + 1).unwrap());
check(ymd(2414, 1, 1), ymd(2014, 1, 1), TimeDelta::try_days(365 * 400 + 97).unwrap());
check(
ymd(MAX_YEAR, 12, 31),
ymd(0, 1, 1),
TimeDelta::try_days(MAX_DAYS_FROM_YEAR_0 as i64).unwrap(),
);
check(
ymd(MIN_YEAR, 1, 1),
ymd(0, 1, 1),
TimeDelta::try_days(MIN_DAYS_FROM_YEAR_0 as i64).unwrap(),
);
}
#[test]
fn test_date_add_days() {
fn check(lhs: Option<NaiveDate>, days: Days, rhs: Option<NaiveDate>) {
assert_eq!(lhs.unwrap().checked_add_days(days), rhs);
}
let ymd = NaiveDate::from_ymd_opt;
check(ymd(2014, 1, 1), Days::new(0), ymd(2014, 1, 1));
// always round towards zero
check(ymd(2014, 1, 1), Days::new(1), ymd(2014, 1, 2));
check(ymd(2014, 1, 1), Days::new(364), ymd(2014, 12, 31));
check(ymd(2014, 1, 1), Days::new(365 * 4 + 1), ymd(2018, 1, 1));
check(ymd(2014, 1, 1), Days::new(365 * 400 + 97), ymd(2414, 1, 1));
check(ymd(-7, 1, 1), Days::new(365 * 12 + 3), ymd(5, 1, 1));
// overflow check
check(ymd(0, 1, 1), Days::new(MAX_DAYS_FROM_YEAR_0.try_into().unwrap()), ymd(MAX_YEAR, 12, 31));
check(ymd(0, 1, 1), Days::new(u64::try_from(MAX_DAYS_FROM_YEAR_0).unwrap() + 1), None);
}
#[test]
fn test_date_sub_days() {
fn check(lhs: Option<NaiveDate>, days: Days, rhs: Option<NaiveDate>) {
assert_eq!(lhs.unwrap().checked_sub_days(days), rhs);
}
let ymd = NaiveDate::from_ymd_opt;
check(ymd(2014, 1, 1), Days::new(0), ymd(2014, 1, 1));
check(ymd(2014, 1, 2), Days::new(1), ymd(2014, 1, 1));
check(ymd(2014, 12, 31), Days::new(364), ymd(2014, 1, 1));
check(ymd(2015, 1, 3), Days::new(365 + 2), ymd(2014, 1, 1));
check(ymd(2018, 1, 1), Days::new(365 * 4 + 1), ymd(2014, 1, 1));
check(ymd(2414, 1, 1), Days::new(365 * 400 + 97), ymd(2014, 1, 1));
check(ymd(MAX_YEAR, 12, 31), Days::new(MAX_DAYS_FROM_YEAR_0.try_into().unwrap()), ymd(0, 1, 1));
check(
ymd(0, 1, 1),
Days::new((-MIN_DAYS_FROM_YEAR_0).try_into().unwrap()),
ymd(MIN_YEAR, 1, 1),
);
}
#[test]
fn test_date_addassignment() {
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
let mut date = ymd(2016, 10, 1);
date += TimeDelta::try_days(10).unwrap();
assert_eq!(date, ymd(2016, 10, 11));
date += TimeDelta::try_days(30).unwrap();
assert_eq!(date, ymd(2016, 11, 10));
}
#[test]
fn test_date_subassignment() {
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
let mut date = ymd(2016, 10, 11);
date -= TimeDelta::try_days(10).unwrap();
assert_eq!(date, ymd(2016, 10, 1));
date -= TimeDelta::try_days(2).unwrap();
assert_eq!(date, ymd(2016, 9, 29));
}
#[test]
fn test_date_fmt() {
assert_eq!(format!("{:?}", NaiveDate::from_ymd_opt(2012, 3, 4).unwrap()), "2012-03-04");
assert_eq!(format!("{:?}", NaiveDate::from_ymd_opt(0, 3, 4).unwrap()), "0000-03-04");
assert_eq!(format!("{:?}", NaiveDate::from_ymd_opt(-307, 3, 4).unwrap()), "-0307-03-04");
assert_eq!(format!("{:?}", NaiveDate::from_ymd_opt(12345, 3, 4).unwrap()), "+12345-03-04");
assert_eq!(NaiveDate::from_ymd_opt(2012, 3, 4).unwrap().to_string(), "2012-03-04");
assert_eq!(NaiveDate::from_ymd_opt(0, 3, 4).unwrap().to_string(), "0000-03-04");
assert_eq!(NaiveDate::from_ymd_opt(-307, 3, 4).unwrap().to_string(), "-0307-03-04");
assert_eq!(NaiveDate::from_ymd_opt(12345, 3, 4).unwrap().to_string(), "+12345-03-04");
// the format specifier should have no effect on `NaiveDate`
assert_eq!(format!("{:+30?}", NaiveDate::from_ymd_opt(1234, 5, 6).unwrap()), "1234-05-06");
assert_eq!(format!("{:30?}", NaiveDate::from_ymd_opt(12345, 6, 7).unwrap()), "+12345-06-07");
}
#[test]
fn test_date_from_str() {
// valid cases
let valid = [
"-0000000123456-1-2",
" -123456 - 1 - 2 ",
"-12345-1-2",
"-1234-12-31",
"-7-6-5",
"350-2-28",
"360-02-29",
"0360-02-29",
"2015-2 -18",
"2015-02-18",
"+70-2-18",
"+70000-2-18",
"+00007-2-18",
];
for &s in &valid {
eprintln!("test_date_from_str valid {s:?}");
let d = match s.parse::<NaiveDate>() {
Ok(d) => d,
Err(e) => panic!("parsing `{s}` has failed: {e}"),
};
eprintln!("d {d:?} (NaiveDate)");
let s_ = format!("{d:?}");
eprintln!("s_ {s_:?}");
// `s` and `s_` may differ, but `s.parse()` and `s_.parse()` must be the same
let d_ = match s_.parse::<NaiveDate>() {
Ok(d) => d,
Err(e) => {
panic!("`{s}` is parsed into `{d:?}`, but reparsing that has failed: {e}")
}
};
eprintln!("d_ {d_:?} (NaiveDate)");
assert!(
d == d_,
"`{s}` is parsed into `{d:?}`, but reparsed result \
`{d_:?}` does not match"
);
}
// some invalid cases
// since `ParseErrorKind` is private, all we can do is to check if there was an error
let invalid = [
"", // empty
"x", // invalid
"Fri, 09 Aug 2013 GMT", // valid date, wrong format
"Sat Jun 30 2012", // valid date, wrong format
"1441497364.649", // valid datetime, wrong format
"+1441497364.649", // valid datetime, wrong format
"+1441497364", // valid datetime, wrong format
"2014/02/03", // valid date, wrong format
"2014", // datetime missing data
"2014-01", // datetime missing data
"2014-01-00", // invalid day
"2014-11-32", // invalid day
"2014-13-01", // invalid month
"2014-13-57", // invalid month, day
"9999999-9-9", // invalid year (out of bounds)
];
for &s in &invalid {
eprintln!("test_date_from_str invalid {s:?}");
assert!(s.parse::<NaiveDate>().is_err());
}
}
#[test]
fn test_date_parse_from_str() {
let ymd = |y, m, d| NaiveDate::from_ymd_opt(y, m, d).unwrap();
assert_eq!(
NaiveDate::parse_from_str("2014-5-7T12:34:56+09:30", "%Y-%m-%dT%H:%M:%S%z"),
Ok(ymd(2014, 5, 7))
); // ignore time and offset
assert_eq!(
NaiveDate::parse_from_str("2015-W06-1=2015-033 Q1", "%G-W%V-%u = %Y-%j Q%q"),
Ok(ymd(2015, 2, 2))
);
assert_eq!(NaiveDate::parse_from_str("Fri, 09 Aug 13", "%a, %d %b %y"), Ok(ymd(2013, 8, 9)));
assert!(NaiveDate::parse_from_str("Sat, 09 Aug 2013", "%a, %d %b %Y").is_err());
assert!(NaiveDate::parse_from_str("2014-57", "%Y-%m-%d").is_err());
assert!(NaiveDate::parse_from_str("2014", "%Y").is_err()); // insufficient
assert!(NaiveDate::parse_from_str("2014-5-7 Q3", "%Y-%m-%d Q%q").is_err()); // mismatched quarter
assert_eq!(
NaiveDate::parse_from_str("2020-01-0", "%Y-%W-%w").ok(),
NaiveDate::from_ymd_opt(2020, 1, 12),
);
assert_eq!(
NaiveDate::parse_from_str("2019-01-0", "%Y-%W-%w").ok(),
NaiveDate::from_ymd_opt(2019, 1, 13),
);
}
#[test]
fn test_day_iterator_limit() {
assert_eq!(NaiveDate::from_ymd_opt(MAX_YEAR, 12, 29).unwrap().iter_days().take(4).count(), 2);
assert_eq!(
NaiveDate::from_ymd_opt(MIN_YEAR, 1, 3).unwrap().iter_days().rev().take(4).count(),
2
);
}
#[test]
fn test_week_iterator_limit() {
assert_eq!(NaiveDate::from_ymd_opt(MAX_YEAR, 12, 12).unwrap().iter_weeks().take(4).count(), 2);
assert_eq!(
NaiveDate::from_ymd_opt(MIN_YEAR, 1, 15).unwrap().iter_weeks().rev().take(4).count(),
2
);
}
#[test]
fn test_weeks_from() {
// tests per: https://github.com/chronotope/chrono/issues/961
// these internally use `weeks_from` via the parsing infrastructure
assert_eq!(
NaiveDate::parse_from_str("2020-01-0", "%Y-%W-%w").ok(),
NaiveDate::from_ymd_opt(2020, 1, 12),
);
assert_eq!(
NaiveDate::parse_from_str("2019-01-0", "%Y-%W-%w").ok(),
NaiveDate::from_ymd_opt(2019, 1, 13),
);
// direct tests
for (y, starts_on) in &[
(2019, Weekday::Tue),
(2020, Weekday::Wed),
(2021, Weekday::Fri),
(2022, Weekday::Sat),
(2023, Weekday::Sun),
(2024, Weekday::Mon),
(2025, Weekday::Wed),
(2026, Weekday::Thu),
] {
for day in &[
Weekday::Mon,
Weekday::Tue,
Weekday::Wed,
Weekday::Thu,
Weekday::Fri,
Weekday::Sat,
Weekday::Sun,
] {
assert_eq!(
NaiveDate::from_ymd_opt(*y, 1, 1).map(|d| d.weeks_from(*day)),
Some(if day == starts_on { 1 } else { 0 })
);
// last day must always be in week 52 or 53
assert!(
[52, 53].contains(&NaiveDate::from_ymd_opt(*y, 12, 31).unwrap().weeks_from(*day)),
);
}
}
let base = NaiveDate::from_ymd_opt(2019, 1, 1).unwrap();
// 400 years covers all year types
for day in &[
Weekday::Mon,
Weekday::Tue,
Weekday::Wed,
Weekday::Thu,
Weekday::Fri,
Weekday::Sat,
Weekday::Sun,
] {
// must always be below 54
for dplus in 1..(400 * 366) {
assert!((base + Days::new(dplus)).weeks_from(*day) < 54)
}
}
}
#[test]
fn test_with_0_overflow() {
let dt = NaiveDate::from_ymd_opt(2023, 4, 18).unwrap();
assert!(dt.with_month0(4294967295).is_none());
assert!(dt.with_day0(4294967295).is_none());
assert!(dt.with_ordinal0(4294967295).is_none());
}
#[test]
fn test_leap_year() {
for year in 0..=MAX_YEAR {
let date = NaiveDate::from_ymd_opt(year, 1, 1).unwrap();
let is_leap = year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
assert_eq!(date.leap_year(), is_leap);
assert_eq!(date.leap_year(), date.with_ordinal(366).is_some());
}
}
#[test]
fn test_date_yearflags() {
for (year, year_flags, _) in YEAR_FLAGS {
assert_eq!(NaiveDate::from_yo_opt(year, 1).unwrap().year_flags(), year_flags);
}
}
#[test]
fn test_weekday_with_yearflags() {
for (year, year_flags, first_weekday) in YEAR_FLAGS {
let first_day_of_year = NaiveDate::from_yo_opt(year, 1).unwrap();
dbg!(year);
assert_eq!(first_day_of_year.year_flags(), year_flags);
assert_eq!(first_day_of_year.weekday(), first_weekday);
let mut prev = first_day_of_year.weekday();
for ordinal in 2u32..=year_flags.ndays() {
let date = NaiveDate::from_yo_opt(year, ordinal).unwrap();
let expected = prev.succ();
assert_eq!(date.weekday(), expected);
prev = expected;
}
}
}
#[test]
fn test_isoweekdate_with_yearflags() {
for (year, year_flags, _) in YEAR_FLAGS {
// January 4 should be in the first week
let jan4 = NaiveDate::from_ymd_opt(year, 1, 4).unwrap();
let iso_week = jan4.iso_week();
assert_eq!(jan4.year_flags(), year_flags);
assert_eq!(iso_week.week(), 1);
}
}
#[test]
fn test_date_to_mdf_to_date() {
for (year, year_flags, _) in YEAR_FLAGS {
for ordinal in 1..=year_flags.ndays() {
let date = NaiveDate::from_yo_opt(year, ordinal).unwrap();
assert_eq!(date, NaiveDate::from_mdf(date.year(), date.mdf()).unwrap());
}
}
}
// Used for testing some methods with all combinations of `YearFlags`.
// (year, flags, first weekday of year)
const YEAR_FLAGS: [(i32, YearFlags, Weekday); 14] = [
(2006, A, Weekday::Sun),
(2005, B, Weekday::Sat),
(2010, C, Weekday::Fri),
(2009, D, Weekday::Thu),
(2003, E, Weekday::Wed),
(2002, F, Weekday::Tue),
(2001, G, Weekday::Mon),
(2012, AG, Weekday::Sun),
(2000, BA, Weekday::Sat),
(2016, CB, Weekday::Fri),
(2004, DC, Weekday::Thu),
(2020, ED, Weekday::Wed),
(2008, FE, Weekday::Tue),
(2024, GF, Weekday::Mon),
];
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let date_min = NaiveDate::MIN;
let bytes = rkyv::to_bytes::<_, 4>(&date_min).unwrap();
assert_eq!(rkyv::from_bytes::<NaiveDate>(&bytes).unwrap(), date_min);
let date_max = NaiveDate::MAX;
let bytes = rkyv::to_bytes::<_, 4>(&date_max).unwrap();
assert_eq!(rkyv::from_bytes::<NaiveDate>(&bytes).unwrap(), date_max);
}
// MAX_YEAR-12-31 minus 0000-01-01
// = (MAX_YEAR-12-31 minus 0000-12-31) + (0000-12-31 - 0000-01-01)
// = MAX_YEAR * 365 + (# of leap years from 0001 to MAX_YEAR) + 365
// = (MAX_YEAR + 1) * 365 + (# of leap years from 0001 to MAX_YEAR)
const MAX_DAYS_FROM_YEAR_0: i32 =
(MAX_YEAR + 1) * 365 + MAX_YEAR / 4 - MAX_YEAR / 100 + MAX_YEAR / 400;
// MIN_YEAR-01-01 minus 0000-01-01
// = MIN_YEAR * 365 + (# of leap years from MIN_YEAR to 0000)
const MIN_DAYS_FROM_YEAR_0: i32 = MIN_YEAR * 365 + MIN_YEAR / 4 - MIN_YEAR / 100 + MIN_YEAR / 400;
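// Sanity check on the leap-year arithmetic above: a full 400-year Gregorian cycle has
// 400 * 365 + (400/4 - 400/100 + 400/400) = 146_000 + 97 = 146_097 days, which matches the
// `146097` constants exercised in `test_date_from_num_days_from_ce`.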
// only used for testing, but duplicated in naive::datetime
const MAX_BITS: usize = 44;

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,405 @@
use super::NaiveDateTime;
use crate::{Datelike, FixedOffset, MappedLocalTime, NaiveDate, TimeDelta, Utc};
#[test]
fn test_datetime_add() {
fn check(
(y, m, d, h, n, s): (i32, u32, u32, u32, u32, u32),
rhs: TimeDelta,
result: Option<(i32, u32, u32, u32, u32, u32)>,
) {
let lhs = NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap();
let sum = result.map(|(y, m, d, h, n, s)| {
NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap()
});
assert_eq!(lhs.checked_add_signed(rhs), sum);
assert_eq!(lhs.checked_sub_signed(-rhs), sum);
}
let seconds = |s| TimeDelta::try_seconds(s).unwrap();
check((2014, 5, 6, 7, 8, 9), seconds(3600 + 60 + 1), Some((2014, 5, 6, 8, 9, 10)));
check((2014, 5, 6, 7, 8, 9), seconds(-(3600 + 60 + 1)), Some((2014, 5, 6, 6, 7, 8)));
check((2014, 5, 6, 7, 8, 9), seconds(86399), Some((2014, 5, 7, 7, 8, 8)));
check((2014, 5, 6, 7, 8, 9), seconds(86_400 * 10), Some((2014, 5, 16, 7, 8, 9)));
check((2014, 5, 6, 7, 8, 9), seconds(-86_400 * 10), Some((2014, 4, 26, 7, 8, 9)));
check((2014, 5, 6, 7, 8, 9), seconds(86_400 * 10), Some((2014, 5, 16, 7, 8, 9)));
// overflow check
// assumes that we have correct values for MAX/MIN_DAYS_FROM_YEAR_0 from `naive::date`.
// (they are private constants, but the equivalence is tested in that module.)
let max_days_from_year_0 =
NaiveDate::MAX.signed_duration_since(NaiveDate::from_ymd_opt(0, 1, 1).unwrap());
check((0, 1, 1, 0, 0, 0), max_days_from_year_0, Some((NaiveDate::MAX.year(), 12, 31, 0, 0, 0)));
check(
(0, 1, 1, 0, 0, 0),
max_days_from_year_0 + seconds(86399),
Some((NaiveDate::MAX.year(), 12, 31, 23, 59, 59)),
);
check((0, 1, 1, 0, 0, 0), max_days_from_year_0 + seconds(86_400), None);
check((0, 1, 1, 0, 0, 0), TimeDelta::MAX, None);
let min_days_from_year_0 =
NaiveDate::MIN.signed_duration_since(NaiveDate::from_ymd_opt(0, 1, 1).unwrap());
check((0, 1, 1, 0, 0, 0), min_days_from_year_0, Some((NaiveDate::MIN.year(), 1, 1, 0, 0, 0)));
check((0, 1, 1, 0, 0, 0), min_days_from_year_0 - seconds(1), None);
check((0, 1, 1, 0, 0, 0), TimeDelta::MIN, None);
}
#[test]
fn test_datetime_sub() {
let ymdhms =
|y, m, d, h, n, s| NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap();
let since = NaiveDateTime::signed_duration_since;
assert_eq!(since(ymdhms(2014, 5, 6, 7, 8, 9), ymdhms(2014, 5, 6, 7, 8, 9)), TimeDelta::zero());
assert_eq!(
since(ymdhms(2014, 5, 6, 7, 8, 10), ymdhms(2014, 5, 6, 7, 8, 9)),
TimeDelta::try_seconds(1).unwrap()
);
assert_eq!(
since(ymdhms(2014, 5, 6, 7, 8, 9), ymdhms(2014, 5, 6, 7, 8, 10)),
TimeDelta::try_seconds(-1).unwrap()
);
assert_eq!(
since(ymdhms(2014, 5, 7, 7, 8, 9), ymdhms(2014, 5, 6, 7, 8, 10)),
TimeDelta::try_seconds(86399).unwrap()
);
assert_eq!(
since(ymdhms(2001, 9, 9, 1, 46, 39), ymdhms(1970, 1, 1, 0, 0, 0)),
TimeDelta::try_seconds(999_999_999).unwrap()
);
}
#[test]
fn test_datetime_addassignment() {
let ymdhms =
|y, m, d, h, n, s| NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap();
let mut date = ymdhms(2016, 10, 1, 10, 10, 10);
date += TimeDelta::try_minutes(10_000_000).unwrap();
assert_eq!(date, ymdhms(2035, 10, 6, 20, 50, 10));
date += TimeDelta::try_days(10).unwrap();
assert_eq!(date, ymdhms(2035, 10, 16, 20, 50, 10));
}
#[test]
fn test_datetime_subassignment() {
let ymdhms =
|y, m, d, h, n, s| NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap();
let mut date = ymdhms(2016, 10, 1, 10, 10, 10);
date -= TimeDelta::try_minutes(10_000_000).unwrap();
assert_eq!(date, ymdhms(1997, 9, 26, 23, 30, 10));
date -= TimeDelta::try_days(10).unwrap();
assert_eq!(date, ymdhms(1997, 9, 16, 23, 30, 10));
}
#[test]
fn test_core_duration_ops() {
use core::time::Duration;
let mut dt = NaiveDate::from_ymd_opt(2023, 8, 29).unwrap().and_hms_opt(11, 34, 12).unwrap();
let same = dt + Duration::ZERO;
assert_eq!(dt, same);
dt += Duration::new(3600, 0);
assert_eq!(dt, NaiveDate::from_ymd_opt(2023, 8, 29).unwrap().and_hms_opt(12, 34, 12).unwrap());
}
#[test]
#[should_panic]
fn test_core_duration_max() {
use core::time::Duration;
let mut utc_dt = NaiveDate::from_ymd_opt(2023, 8, 29).unwrap().and_hms_opt(11, 34, 12).unwrap();
utc_dt += Duration::MAX;
}
#[test]
fn test_datetime_from_str() {
// valid cases
let valid = [
"2001-02-03T04:05:06",
"2012-12-12T12:12:12",
"2015-02-18T23:16:09.153",
"2015-2-18T23:16:09.153",
"-77-02-18T23:16:09",
"+82701-05-6T15:9:60.898989898989",
" +82701 - 05 - 6 T 15 : 9 : 60.898989898989 ",
];
for &s in &valid {
eprintln!("test_parse_naivedatetime valid {s:?}");
let d = match s.parse::<NaiveDateTime>() {
Ok(d) => d,
Err(e) => panic!("parsing `{s}` has failed: {e}"),
};
let s_ = format!("{d:?}");
// `s` and `s_` may differ, but `s.parse()` and `s_.parse()` must be the same
let d_ = match s_.parse::<NaiveDateTime>() {
Ok(d) => d,
Err(e) => {
panic!("`{s}` is parsed into `{d:?}`, but reparsing that has failed: {e}")
}
};
assert!(
d == d_,
"`{s}` is parsed into `{d:?}`, but reparsed result \
`{d_:?}` does not match"
);
}
// some invalid cases
// since `ParseErrorKind` is private, all we can do is to check if there was an error
let invalid = [
"", // empty
"x", // invalid / missing data
"15", // missing data
"15:8:9", // looks like a time (invalid date)
"15-8-9", // looks like a date (invalid)
"Fri, 09 Aug 2013 23:54:35 GMT", // valid date, wrong format
"Sat Jun 30 23:59:60 2012", // valid date, wrong format
"1441497364.649", // valid date, wrong format
"+1441497364.649", // valid date, wrong format
"+1441497364", // valid date, wrong format
"2014/02/03 04:05:06", // valid date, wrong format
"2015-15-15T15:15:15", // invalid date
"2012-12-12T12:12:12x", // bad timezone / trailing literal
"2012-12-12T12:12:12+00:00", // unexpected timezone / trailing literal
"2012-12-12T12:12:12 +00:00", // unexpected timezone / trailing literal
"2012-12-12T12:12:12 GMT", // unexpected timezone / trailing literal
"2012-123-12T12:12:12", // invalid month
"2012-12-12t12:12:12", // bad divider 't'
"2012-12-12 12:12:12", // missing divider 'T'
"2012-12-12T12:12:12Z", // trailing char 'Z'
"+ 82701-123-12T12:12:12", // strange year, invalid month
"+802701-123-12T12:12:12", // out-of-bound year, invalid month
];
for &s in &invalid {
eprintln!("test_datetime_from_str invalid {s:?}");
assert!(s.parse::<NaiveDateTime>().is_err());
}
}
#[test]
fn test_datetime_parse_from_str() {
let ymdhms =
|y, m, d, h, n, s| NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap();
let ymdhmsn = |y, m, d, h, n, s, nano| {
NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_nano_opt(h, n, s, nano).unwrap()
};
assert_eq!(
NaiveDateTime::parse_from_str("2014-5-7T12:34:56+09:30", "%Y-%m-%dT%H:%M:%S%z"),
Ok(ymdhms(2014, 5, 7, 12, 34, 56))
); // ignore offset
assert_eq!(
NaiveDateTime::parse_from_str("2015-W06-1 000000", "%G-W%V-%u%H%M%S"),
Ok(ymdhms(2015, 2, 2, 0, 0, 0))
);
assert_eq!(
NaiveDateTime::parse_from_str("Fri, 09 Aug 2013 23:54:35 GMT", "%a, %d %b %Y %H:%M:%S GMT"),
Ok(ymdhms(2013, 8, 9, 23, 54, 35))
);
assert!(
NaiveDateTime::parse_from_str("Sat, 09 Aug 2013 23:54:35 GMT", "%a, %d %b %Y %H:%M:%S GMT")
.is_err()
);
assert!(NaiveDateTime::parse_from_str("2014-5-7 Q2 12:3456", "%Y-%m-%d Q%q %H:%M:%S").is_err());
assert!(NaiveDateTime::parse_from_str("12:34:56", "%H:%M:%S").is_err()); // insufficient
assert_eq!(
NaiveDateTime::parse_from_str("1441497364", "%s"),
Ok(ymdhms(2015, 9, 5, 23, 56, 4))
);
assert_eq!(
NaiveDateTime::parse_from_str("1283929614.1234", "%s.%f"),
Ok(ymdhmsn(2010, 9, 8, 7, 6, 54, 1234))
);
assert_eq!(
NaiveDateTime::parse_from_str("1441497364.649", "%s%.3f"),
Ok(ymdhmsn(2015, 9, 5, 23, 56, 4, 649000000))
);
assert_eq!(
NaiveDateTime::parse_from_str("1497854303.087654", "%s%.6f"),
Ok(ymdhmsn(2017, 6, 19, 6, 38, 23, 87654000))
);
assert_eq!(
NaiveDateTime::parse_from_str("1437742189.918273645", "%s%.9f"),
Ok(ymdhmsn(2015, 7, 24, 12, 49, 49, 918273645))
);
}
#[test]
fn test_datetime_parse_from_str_with_spaces() {
let parse_from_str = NaiveDateTime::parse_from_str;
let dt = NaiveDate::from_ymd_opt(2013, 8, 9).unwrap().and_hms_opt(23, 54, 35).unwrap();
// with varying spaces - should succeed
assert_eq!(parse_from_str(" Aug 09 2013 23:54:35", " %b %d %Y %H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35 ", "%b %d %Y %H:%M:%S "), Ok(dt));
assert_eq!(parse_from_str(" Aug 09 2013 23:54:35 ", " %b %d %Y %H:%M:%S "), Ok(dt));
assert_eq!(parse_from_str(" Aug 09 2013 23:54:35", " %b %d %Y %H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str(" Aug 09 2013 23:54:35", " %b %d %Y %H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("\n\tAug 09 2013 23:54:35 ", "\n\t%b %d %Y %H:%M:%S "), Ok(dt));
assert_eq!(parse_from_str("\tAug 09 2013 23:54:35\t", "\t%b %d %Y %H:%M:%S\t"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35", "%b %d %Y %H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35", "%b %d %Y %H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013\t23:54:35", "%b %d %Y\t%H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013\t\t23:54:35", "%b %d %Y\t\t%H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35 ", "%b %d %Y %H:%M:%S\n"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35", "%b %d %Y\t%H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35", "%b %d %Y %H:%M:%S "), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35", " %b %d %Y %H:%M:%S"), Ok(dt));
assert_eq!(parse_from_str("Aug 09 2013 23:54:35", "%b %d %Y %H:%M:%S\n"), Ok(dt));
// with varying spaces - should fail
// leading space in data
assert!(parse_from_str(" Aug 09 2013 23:54:35", "%b %d %Y %H:%M:%S").is_err());
// trailing space in data
assert!(parse_from_str("Aug 09 2013 23:54:35 ", "%b %d %Y %H:%M:%S").is_err());
// trailing tab in data
assert!(parse_from_str("Aug 09 2013 23:54:35\t", "%b %d %Y %H:%M:%S").is_err());
// mismatched newlines
assert!(parse_from_str("\nAug 09 2013 23:54:35", "%b %d %Y %H:%M:%S\n").is_err());
// trailing literal in data
assert!(parse_from_str("Aug 09 2013 23:54:35 !!!", "%b %d %Y %H:%M:%S ").is_err());
}
#[test]
fn test_datetime_add_sub_invariant() {
// issue #37
let base = NaiveDate::from_ymd_opt(2000, 1, 1).unwrap().and_hms_opt(0, 0, 0).unwrap();
let t = -946684799990000;
let time = base + TimeDelta::microseconds(t);
assert_eq!(t, time.signed_duration_since(base).num_microseconds().unwrap());
}
#[test]
fn test_and_local_timezone() {
let ndt = NaiveDate::from_ymd_opt(2022, 6, 15).unwrap().and_hms_opt(18, 59, 36).unwrap();
let dt_utc = ndt.and_utc();
assert_eq!(dt_utc.naive_local(), ndt);
assert_eq!(dt_utc.timezone(), Utc);
let offset_tz = FixedOffset::west_opt(4 * 3600).unwrap();
let dt_offset = ndt.and_local_timezone(offset_tz).unwrap();
assert_eq!(dt_offset.naive_local(), ndt);
assert_eq!(dt_offset.timezone(), offset_tz);
}
#[test]
fn test_and_utc() {
let ndt = NaiveDate::from_ymd_opt(2023, 1, 30).unwrap().and_hms_opt(19, 32, 33).unwrap();
let dt_utc = ndt.and_utc();
assert_eq!(dt_utc.naive_local(), ndt);
assert_eq!(dt_utc.timezone(), Utc);
}
#[test]
fn test_checked_add_offset() {
let ymdhmsm = |y, m, d, h, mn, s, mi| {
NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_milli_opt(h, mn, s, mi)
};
let positive_offset = FixedOffset::east_opt(2 * 60 * 60).unwrap();
// regular date
let dt = ymdhmsm(2023, 5, 5, 20, 10, 0, 0).unwrap();
assert_eq!(dt.checked_add_offset(positive_offset), ymdhmsm(2023, 5, 5, 22, 10, 0, 0));
// leap second is preserved
let dt = ymdhmsm(2023, 6, 30, 23, 59, 59, 1_000).unwrap();
assert_eq!(dt.checked_add_offset(positive_offset), ymdhmsm(2023, 7, 1, 1, 59, 59, 1_000));
// out of range
assert!(NaiveDateTime::MAX.checked_add_offset(positive_offset).is_none());
let negative_offset = FixedOffset::west_opt(2 * 60 * 60).unwrap();
// regular date
let dt = ymdhmsm(2023, 5, 5, 20, 10, 0, 0).unwrap();
assert_eq!(dt.checked_add_offset(negative_offset), ymdhmsm(2023, 5, 5, 18, 10, 0, 0));
// leap second is preserved
let dt = ymdhmsm(2023, 6, 30, 23, 59, 59, 1_000).unwrap();
assert_eq!(dt.checked_add_offset(negative_offset), ymdhmsm(2023, 6, 30, 21, 59, 59, 1_000));
// out of range
assert!(NaiveDateTime::MIN.checked_add_offset(negative_offset).is_none());
}
#[test]
fn test_checked_sub_offset() {
let ymdhmsm = |y, m, d, h, mn, s, mi| {
NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_milli_opt(h, mn, s, mi)
};
let positive_offset = FixedOffset::east_opt(2 * 60 * 60).unwrap();
// regular date
let dt = ymdhmsm(2023, 5, 5, 20, 10, 0, 0).unwrap();
assert_eq!(dt.checked_sub_offset(positive_offset), ymdhmsm(2023, 5, 5, 18, 10, 0, 0));
// leap second is preserved
let dt = ymdhmsm(2023, 6, 30, 23, 59, 59, 1_000).unwrap();
assert_eq!(dt.checked_sub_offset(positive_offset), ymdhmsm(2023, 6, 30, 21, 59, 59, 1_000));
// out of range
assert!(NaiveDateTime::MIN.checked_sub_offset(positive_offset).is_none());
let negative_offset = FixedOffset::west_opt(2 * 60 * 60).unwrap();
// regular date
let dt = ymdhmsm(2023, 5, 5, 20, 10, 0, 0).unwrap();
assert_eq!(dt.checked_sub_offset(negative_offset), ymdhmsm(2023, 5, 5, 22, 10, 0, 0));
// leap second is preserved
let dt = ymdhmsm(2023, 6, 30, 23, 59, 59, 1_000).unwrap();
assert_eq!(dt.checked_sub_offset(negative_offset), ymdhmsm(2023, 7, 1, 1, 59, 59, 1_000));
// out of range
assert!(NaiveDateTime::MAX.checked_sub_offset(negative_offset).is_none());
assert_eq!(dt.checked_add_offset(positive_offset), Some(dt + positive_offset));
assert_eq!(dt.checked_sub_offset(positive_offset), Some(dt - positive_offset));
}
#[test]
fn test_overflowing_add_offset() {
let ymdhmsm = |y, m, d, h, mn, s, mi| {
NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_milli_opt(h, mn, s, mi).unwrap()
};
let positive_offset = FixedOffset::east_opt(2 * 60 * 60).unwrap();
// regular date
let dt = ymdhmsm(2023, 5, 5, 20, 10, 0, 0);
assert_eq!(dt.overflowing_add_offset(positive_offset), ymdhmsm(2023, 5, 5, 22, 10, 0, 0));
// leap second is preserved
let dt = ymdhmsm(2023, 6, 30, 23, 59, 59, 1_000);
assert_eq!(dt.overflowing_add_offset(positive_offset), ymdhmsm(2023, 7, 1, 1, 59, 59, 1_000));
// out of range
assert!(NaiveDateTime::MAX.overflowing_add_offset(positive_offset) > NaiveDateTime::MAX);
let negative_offset = FixedOffset::west_opt(2 * 60 * 60).unwrap();
// regular date
let dt = ymdhmsm(2023, 5, 5, 20, 10, 0, 0);
assert_eq!(dt.overflowing_add_offset(negative_offset), ymdhmsm(2023, 5, 5, 18, 10, 0, 0));
// leap second is preserved
let dt = ymdhmsm(2023, 6, 30, 23, 59, 59, 1_000);
assert_eq!(dt.overflowing_add_offset(negative_offset), ymdhmsm(2023, 6, 30, 21, 59, 59, 1_000));
// out of range
assert!(NaiveDateTime::MIN.overflowing_add_offset(negative_offset) < NaiveDateTime::MIN);
}
#[test]
fn test_and_timezone_min_max_dates() {
for offset_hour in -23..=23 {
dbg!(offset_hour);
let offset = FixedOffset::east_opt(offset_hour * 60 * 60).unwrap();
let local_max = NaiveDateTime::MAX.and_local_timezone(offset);
if offset_hour >= 0 {
assert_eq!(local_max.unwrap().naive_local(), NaiveDateTime::MAX);
} else {
assert_eq!(local_max, MappedLocalTime::None);
}
let local_min = NaiveDateTime::MIN.and_local_timezone(offset);
if offset_hour <= 0 {
assert_eq!(local_min.unwrap().naive_local(), NaiveDateTime::MIN);
} else {
assert_eq!(local_min, MappedLocalTime::None);
}
}
}
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let dt_min = NaiveDateTime::MIN;
let bytes = rkyv::to_bytes::<_, 12>(&dt_min).unwrap();
assert_eq!(rkyv::from_bytes::<NaiveDateTime>(&bytes).unwrap(), dt_min);
let dt_max = NaiveDateTime::MAX;
let bytes = rkyv::to_bytes::<_, 12>(&dt_max).unwrap();
assert_eq!(rkyv::from_bytes::<NaiveDateTime>(&bytes).unwrap(), dt_max);
}

View File

@@ -0,0 +1,591 @@
//! Internal helper types for working with dates.
#![cfg_attr(feature = "__internal_bench", allow(missing_docs))]
use core::fmt;
/// Year flags (aka the dominical letter).
///
/// `YearFlags` are used as the last four bits of `NaiveDate`, `Mdf` and `IsoWeek`.
///
/// There are 14 possible classes of year in the Gregorian calendar:
/// common and leap years starting with Monday through Sunday.
///
/// The `YearFlags` stores this information into 4 bits `LWWW`. `L` is the leap year flag, with `1`
/// for the common year (this simplifies validating an ordinal in `NaiveDate`). `WWW` is a non-zero
/// `Weekday` of the last day in the preceding year.
#[allow(unreachable_pub)] // public as an alias for benchmarks only
#[derive(PartialEq, Eq, Copy, Clone, Hash)]
pub struct YearFlags(pub(super) u8);
// Weekday of the last day in the preceding year.
// Allows for quick day of week calculation from the 1-based ordinal.
const YEAR_STARTS_AFTER_MONDAY: u8 = 7; // non-zero to allow use with `NonZero*`.
const YEAR_STARTS_AFTER_TUESDAY: u8 = 1;
const YEAR_STARTS_AFTER_WEDNESDAY: u8 = 2;
const YEAR_STARTS_AFTER_THURSDAY: u8 = 3;
const YEAR_STARTS_AFTER_FRIDAY: u8 = 4;
const YEAR_STARTS_AFTER_SATURDAY: u8 = 5;
const YEAR_STARTS_AFTER_SUNDAY: u8 = 6;
const COMMON_YEAR: u8 = 1 << 3;
const LEAP_YEAR: u8 = 0 << 3;
pub(super) const A: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_SATURDAY);
pub(super) const AG: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_SATURDAY);
pub(super) const B: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_FRIDAY);
pub(super) const BA: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_FRIDAY);
pub(super) const C: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_THURSDAY);
pub(super) const CB: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_THURSDAY);
pub(super) const D: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_WEDNESDAY);
pub(super) const DC: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_WEDNESDAY);
pub(super) const E: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_TUESDAY);
pub(super) const ED: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_TUESDAY);
pub(super) const F: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_MONDAY);
pub(super) const FE: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_MONDAY);
pub(super) const G: YearFlags = YearFlags(COMMON_YEAR | YEAR_STARTS_AFTER_SUNDAY);
pub(super) const GF: YearFlags = YearFlags(LEAP_YEAR | YEAR_STARTS_AFTER_SUNDAY);
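// Worked example of the `LWWW` encoding: 2024 is a leap year starting on a Monday, so the last
// day of 2023 is a Sunday. That gives `LEAP_YEAR | YEAR_STARTS_AFTER_SUNDAY` = 0o06, i.e. the
// `GF` flags above, matching the `(2024, GF, Weekday::Mon)` entry in the `YEAR_FLAGS` test table.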
const YEAR_TO_FLAGS: &[YearFlags; 400] = &[
BA, G, F, E, DC, B, A, G, FE, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA,
G, F, E, DC, B, A, G, FE, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G,
F, E, DC, B, A, G, FE, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F,
E, DC, B, A, G, FE, D, C, B, AG, F, E, D, // 100
C, B, A, G, FE, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC,
B, A, G, FE, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B,
A, G, FE, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A,
G, FE, D, C, B, AG, F, E, D, CB, A, G, F, // 200
E, D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A, G, FE,
D, C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A, G, FE, D,
C, B, AG, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A, G, FE, D, C,
B, AG, F, E, D, CB, A, G, F, ED, C, B, A, // 300
G, F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A, G, FE, D, C, B, AG,
F, E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A, G, FE, D, C, B, AG, F,
E, D, CB, A, G, F, ED, C, B, A, GF, E, D, C, BA, G, F, E, DC, B, A, G, FE, D, C, B, AG, F, E,
D, CB, A, G, F, ED, C, B, A, GF, E, D, C, // 400
];
impl YearFlags {
#[allow(unreachable_pub)] // public as an alias for benchmarks only
#[doc(hidden)] // for benchmarks only
#[inline]
#[must_use]
pub const fn from_year(year: i32) -> YearFlags {
let year = year.rem_euclid(400);
YearFlags::from_year_mod_400(year)
}
#[inline]
pub(super) const fn from_year_mod_400(year: i32) -> YearFlags {
YEAR_TO_FLAGS[year as usize]
}
#[inline]
pub(super) const fn ndays(&self) -> u32 {
let YearFlags(flags) = *self;
366 - (flags >> 3) as u32
}
#[inline]
pub(super) const fn isoweek_delta(&self) -> u32 {
let YearFlags(flags) = *self;
let mut delta = (flags & 0b0111) as u32;
if delta < 3 {
delta += 7;
}
delta
}
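// The bit mask in `nisoweeks` below marks the flag values for years with 53 ISO weeks:
// bits 1 and 2 (leap years starting on Wednesday or Thursday, i.e. `ED` and `DC`) and
// bit 10 (common years starting on Thursday, i.e. `D`); every other year has 52 ISO weeks.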
#[inline]
pub(super) const fn nisoweeks(&self) -> u32 {
let YearFlags(flags) = *self;
52 + ((0b0000_0100_0000_0110 >> flags as usize) & 1)
}
}
impl fmt::Debug for YearFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let YearFlags(flags) = *self;
match flags {
0o15 => "A".fmt(f),
0o05 => "AG".fmt(f),
0o14 => "B".fmt(f),
0o04 => "BA".fmt(f),
0o13 => "C".fmt(f),
0o03 => "CB".fmt(f),
0o12 => "D".fmt(f),
0o02 => "DC".fmt(f),
0o11 => "E".fmt(f),
0o01 => "ED".fmt(f),
0o10 => "F?".fmt(f),
0o00 => "FE?".fmt(f), // non-canonical
0o17 => "F".fmt(f),
0o07 => "FE".fmt(f),
0o16 => "G".fmt(f),
0o06 => "GF".fmt(f),
_ => write!(f, "YearFlags({flags})"),
}
}
}
// OL: (ordinal << 1) | leap year flag
const MAX_OL: u32 = 366 << 1; // `(366 << 1) | 1` would be day 366 in a non-leap year
const MAX_MDL: u32 = (12 << 6) | (31 << 1) | 1;
// The next table contains adjustment values to convert a date encoded as month-day-leapyear to
// ordinal-leapyear. OL = MDL - adjustment.
// Dates that do not exist are encoded as `XX`.
const XX: i8 = 0;
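// Worked example of the conversion: March 1 in a common year is encoded as
// MDL = (3 << 6) | (1 << 1) | 1 = 195; MDL_TO_OL[195] = 74, so OL = 195 - 74 = 121
// = (60 << 1) | 1, i.e. ordinal 60 (31 days of January + 28 of February + 1) with the low
// flag bit set, the value used for common (non-leap) years as noted at `MAX_OL`.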
const MDL_TO_OL: &[i8; MAX_MDL as usize + 1] = &[
XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX,
XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX,
XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, XX, // 0
XX, XX, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, // 1
XX, XX, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, XX, XX, XX, XX, XX, // 2
XX, XX, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74,
72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74,
72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, // 3
XX, XX, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76,
74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76,
74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, XX, XX, // 4
XX, XX, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80,
78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80,
78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, // 5
XX, XX, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82,
80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82,
80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, XX, XX, // 6
XX, XX, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86,
84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86,
84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, // 7
XX, XX, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88,
86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88,
86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, // 8
XX, XX, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90,
88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90,
88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, XX, XX, // 9
XX, XX, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94,
92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94,
92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, // 10
XX, XX, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96,
94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96,
94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, XX, XX, // 11
XX, XX, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98,
100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100,
98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98,
100, // 12
];
const OL_TO_MDL: &[u8; MAX_OL as usize + 1] = &[
0, 0, // 0
64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64,
64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, // 1
66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66,
66, 66, 66, 66, 66, 66, 66, 66, 66, // 2
74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72,
74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72,
74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, 74, 72, // 3
76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74,
76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74,
76, 74, 76, 74, 76, 74, 76, 74, 76, 74, 76, 74, // 4
80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78,
80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78,
80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, 80, 78, // 5
82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80,
82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80,
82, 80, 82, 80, 82, 80, 82, 80, 82, 80, 82, 80, // 6
86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84,
86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84,
86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, 86, 84, // 7
88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86,
88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86,
88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, 88, 86, // 8
90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88,
90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88,
90, 88, 90, 88, 90, 88, 90, 88, 90, 88, 90, 88, // 9
94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92,
94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92,
94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, 94, 92, // 10
96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94,
96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94,
96, 94, 96, 94, 96, 94, 96, 94, 96, 94, 96, 94, // 11
100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100,
98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98,
100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100, 98, 100,
98, // 12
];
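// Editor's note (worked example, not part of the upstream source): assuming the `Mdl`/`OL`
// encodings described above, 1 March in a common year (flag bit `0o10` set, so the low bit of
// `Mdl` is 1) has MDL = (3 << 6) | (1 << 1) | 1 = 195. MDL_TO_OL[195] = 74, so OL = 121 and the
// ordinal is 121 >> 1 = 60 (31 + 28 + 1); OL_TO_MDL[121] = 74 maps it back to 195.
const _: () = assert!(
    MDL_TO_OL[195] == 74 && (195 - MDL_TO_OL[195] as u8 as u32) >> 1 == 60 && OL_TO_MDL[121] == 74
);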
/// Month, day of month and year flags: `(month << 9) | (day << 4) | flags`
/// `M_MMMD_DDDD_LFFF`
///
/// All bits except the lowest 3 are referred to as `Mdl` (month, day of month, and leap
/// year flag), which is an index to the `MDL_TO_OL` lookup table.
///
/// The conversion between the packed calendar date (`Mdf`) and the ordinal date (`NaiveDate`) is
/// based on the moderately-sized lookup table (~1.5KB) and the packed representation is chosen for
/// efficient lookup.
///
/// The methods of `Mdf` validate their inputs as late as possible. Dates that can't exist, like
/// February 30, can still be represented. This allows the validation to be combined with the final
/// table lookup, which is good for performance.
#[derive(PartialEq, PartialOrd, Copy, Clone)]
pub(super) struct Mdf(u32);
impl Mdf {
/// Makes a new `Mdf` value from month, day and `YearFlags`.
///
/// This method doesn't fully validate the range of the `month` and `day` parameters, only as
/// much as what can't be deferred until later. The year `flags` are trusted to be correct.
///
/// # Errors
///
/// Returns `None` if `month > 12` or `day > 31`.
#[inline]
pub(super) const fn new(month: u32, day: u32, YearFlags(flags): YearFlags) -> Option<Mdf> {
match month <= 12 && day <= 31 {
true => Some(Mdf((month << 9) | (day << 4) | flags as u32)),
false => None,
}
}
/// Makes a new `Mdf` value from an `i32` with an ordinal and a leap year flag, and year
/// `flags`.
///
/// The `ol` is trusted to be valid, and the `flags` are trusted to match it.
#[inline]
pub(super) const fn from_ol(ol: i32, YearFlags(flags): YearFlags) -> Mdf {
debug_assert!(ol > 1 && ol <= MAX_OL as i32);
// The array is indexed with `ol` in `2..=MAX_OL`; indices 0 and 1 hold meaningless placeholder values.
Mdf(((ol as u32 + OL_TO_MDL[ol as usize] as u32) << 3) | flags as u32)
}
/// Returns the month of this `Mdf`.
#[inline]
pub(super) const fn month(&self) -> u32 {
let Mdf(mdf) = *self;
mdf >> 9
}
/// Replaces the month of this `Mdf`, keeping the day and flags.
///
/// # Errors
///
/// Returns `None` if `month > 12`.
#[inline]
pub(super) const fn with_month(&self, month: u32) -> Option<Mdf> {
if month > 12 {
return None;
}
let Mdf(mdf) = *self;
Some(Mdf((mdf & 0b1_1111_1111) | (month << 9)))
}
/// Returns the day of this `Mdf`.
#[inline]
pub(super) const fn day(&self) -> u32 {
let Mdf(mdf) = *self;
(mdf >> 4) & 0b1_1111
}
/// Replaces the day of this `Mdf`, keeping the month and flags.
///
/// # Errors
///
/// Returns `None` if `day > 31`.
#[inline]
pub(super) const fn with_day(&self, day: u32) -> Option<Mdf> {
if day > 31 {
return None;
}
let Mdf(mdf) = *self;
Some(Mdf((mdf & !0b1_1111_0000) | (day << 4)))
}
/// Replaces the flags of this `Mdf`, keeping the month and day.
#[inline]
pub(super) const fn with_flags(&self, YearFlags(flags): YearFlags) -> Mdf {
let Mdf(mdf) = *self;
Mdf((mdf & !0b1111) | flags as u32)
}
/// Returns the ordinal that corresponds to this `Mdf`.
///
/// This does a table lookup to calculate the corresponding ordinal. It will return an error if
/// the `Mdl` turns out not to be a valid date.
///
/// # Errors
///
/// Returns `None` if `month == 0` or `day == 0`, or if the given day does not exist in the
/// given month.
#[inline]
pub(super) const fn ordinal(&self) -> Option<u32> {
let mdl = self.0 >> 3;
match MDL_TO_OL[mdl as usize] {
XX => None,
v => Some((mdl - v as u8 as u32) >> 1),
}
}
/// Returns the year flags of this `Mdf`.
#[inline]
pub(super) const fn year_flags(&self) -> YearFlags {
YearFlags((self.0 & 0b1111) as u8)
}
/// Returns the ordinal that corresponds to this `Mdf`, encoded as a value including year flags.
///
/// This does a table lookup to calculate the corresponding ordinal. It will return an error if
/// the `Mdl` turns out not to be a valid date.
///
/// # Errors
///
/// Returns `None` if `month == 0` or `day == 0`, or if the given day does not exist in the
/// given month.
#[inline]
pub(super) const fn ordinal_and_flags(&self) -> Option<i32> {
let mdl = self.0 >> 3;
match MDL_TO_OL[mdl as usize] {
XX => None,
v => Some(self.0 as i32 - ((v as i32) << 3)),
}
}
#[cfg(test)]
fn valid(&self) -> bool {
let mdl = self.0 >> 3;
MDL_TO_OL[mdl as usize] > 0
}
}
impl fmt::Debug for Mdf {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Mdf(mdf) = *self;
write!(
f,
"Mdf(({} << 9) | ({} << 4) | {:#04o} /*{:?}*/)",
mdf >> 9,
(mdf >> 4) & 0b1_1111,
mdf & 0b1111,
YearFlags((mdf & 0b1111) as u8)
)
}
}
#[cfg(test)]
mod tests {
use super::Mdf;
use super::{A, AG, B, BA, C, CB, D, DC, E, ED, F, FE, G, GF, YearFlags};
const NONLEAP_FLAGS: [YearFlags; 7] = [A, B, C, D, E, F, G];
const LEAP_FLAGS: [YearFlags; 7] = [AG, BA, CB, DC, ED, FE, GF];
const FLAGS: [YearFlags; 14] = [A, B, C, D, E, F, G, AG, BA, CB, DC, ED, FE, GF];
#[test]
fn test_year_flags_ndays_from_year() {
assert_eq!(YearFlags::from_year(2014).ndays(), 365);
assert_eq!(YearFlags::from_year(2012).ndays(), 366);
assert_eq!(YearFlags::from_year(2000).ndays(), 366);
assert_eq!(YearFlags::from_year(1900).ndays(), 365);
assert_eq!(YearFlags::from_year(1600).ndays(), 366);
assert_eq!(YearFlags::from_year(1).ndays(), 365);
assert_eq!(YearFlags::from_year(0).ndays(), 366); // 1 BCE (proleptic Gregorian)
assert_eq!(YearFlags::from_year(-1).ndays(), 365); // 2 BCE
assert_eq!(YearFlags::from_year(-4).ndays(), 366); // 5 BCE
assert_eq!(YearFlags::from_year(-99).ndays(), 365); // 100 BCE
assert_eq!(YearFlags::from_year(-100).ndays(), 365); // 101 BCE
assert_eq!(YearFlags::from_year(-399).ndays(), 365); // 400 BCE
assert_eq!(YearFlags::from_year(-400).ndays(), 366); // 401 BCE
}
#[test]
fn test_year_flags_nisoweeks() {
assert_eq!(A.nisoweeks(), 52);
assert_eq!(B.nisoweeks(), 52);
assert_eq!(C.nisoweeks(), 52);
assert_eq!(D.nisoweeks(), 53);
assert_eq!(E.nisoweeks(), 52);
assert_eq!(F.nisoweeks(), 52);
assert_eq!(G.nisoweeks(), 52);
assert_eq!(AG.nisoweeks(), 52);
assert_eq!(BA.nisoweeks(), 52);
assert_eq!(CB.nisoweeks(), 52);
assert_eq!(DC.nisoweeks(), 53);
assert_eq!(ED.nisoweeks(), 53);
assert_eq!(FE.nisoweeks(), 52);
assert_eq!(GF.nisoweeks(), 52);
}
#[test]
fn test_mdf_valid() {
fn check(expected: bool, flags: YearFlags, month1: u32, day1: u32, month2: u32, day2: u32) {
for month in month1..=month2 {
for day in day1..=day2 {
let mdf = match Mdf::new(month, day, flags) {
Some(mdf) => mdf,
None if !expected => continue,
None => panic!("Mdf::new({month}, {day}, {flags:?}) returned None"),
};
assert!(
mdf.valid() == expected,
"month {} day {} = {:?} should be {} for dominical year {:?}",
month,
day,
mdf,
if expected { "valid" } else { "invalid" },
flags
);
}
}
}
for &flags in NONLEAP_FLAGS.iter() {
check(false, flags, 0, 0, 0, 1024);
check(false, flags, 0, 0, 16, 0);
check(true, flags, 1, 1, 1, 31);
check(false, flags, 1, 32, 1, 1024);
check(true, flags, 2, 1, 2, 28);
check(false, flags, 2, 29, 2, 1024);
check(true, flags, 3, 1, 3, 31);
check(false, flags, 3, 32, 3, 1024);
check(true, flags, 4, 1, 4, 30);
check(false, flags, 4, 31, 4, 1024);
check(true, flags, 5, 1, 5, 31);
check(false, flags, 5, 32, 5, 1024);
check(true, flags, 6, 1, 6, 30);
check(false, flags, 6, 31, 6, 1024);
check(true, flags, 7, 1, 7, 31);
check(false, flags, 7, 32, 7, 1024);
check(true, flags, 8, 1, 8, 31);
check(false, flags, 8, 32, 8, 1024);
check(true, flags, 9, 1, 9, 30);
check(false, flags, 9, 31, 9, 1024);
check(true, flags, 10, 1, 10, 31);
check(false, flags, 10, 32, 10, 1024);
check(true, flags, 11, 1, 11, 30);
check(false, flags, 11, 31, 11, 1024);
check(true, flags, 12, 1, 12, 31);
check(false, flags, 12, 32, 12, 1024);
check(false, flags, 13, 0, 16, 1024);
check(false, flags, u32::MAX, 0, u32::MAX, 1024);
check(false, flags, 0, u32::MAX, 16, u32::MAX);
check(false, flags, u32::MAX, u32::MAX, u32::MAX, u32::MAX);
}
for &flags in LEAP_FLAGS.iter() {
check(false, flags, 0, 0, 0, 1024);
check(false, flags, 0, 0, 16, 0);
check(true, flags, 1, 1, 1, 31);
check(false, flags, 1, 32, 1, 1024);
check(true, flags, 2, 1, 2, 29);
check(false, flags, 2, 30, 2, 1024);
check(true, flags, 3, 1, 3, 31);
check(false, flags, 3, 32, 3, 1024);
check(true, flags, 4, 1, 4, 30);
check(false, flags, 4, 31, 4, 1024);
check(true, flags, 5, 1, 5, 31);
check(false, flags, 5, 32, 5, 1024);
check(true, flags, 6, 1, 6, 30);
check(false, flags, 6, 31, 6, 1024);
check(true, flags, 7, 1, 7, 31);
check(false, flags, 7, 32, 7, 1024);
check(true, flags, 8, 1, 8, 31);
check(false, flags, 8, 32, 8, 1024);
check(true, flags, 9, 1, 9, 30);
check(false, flags, 9, 31, 9, 1024);
check(true, flags, 10, 1, 10, 31);
check(false, flags, 10, 32, 10, 1024);
check(true, flags, 11, 1, 11, 30);
check(false, flags, 11, 31, 11, 1024);
check(true, flags, 12, 1, 12, 31);
check(false, flags, 12, 32, 12, 1024);
check(false, flags, 13, 0, 16, 1024);
check(false, flags, u32::MAX, 0, u32::MAX, 1024);
check(false, flags, 0, u32::MAX, 16, u32::MAX);
check(false, flags, u32::MAX, u32::MAX, u32::MAX, u32::MAX);
}
}
#[test]
fn test_mdf_fields() {
for &flags in FLAGS.iter() {
for month in 1u32..=12 {
for day in 1u32..31 {
let mdf = match Mdf::new(month, day, flags) {
Some(mdf) => mdf,
None => continue,
};
if mdf.valid() {
assert_eq!(mdf.month(), month);
assert_eq!(mdf.day(), day);
}
}
}
}
}
#[test]
fn test_mdf_with_fields() {
fn check(flags: YearFlags, month: u32, day: u32) {
let mdf = Mdf::new(month, day, flags).unwrap();
for month in 0u32..=16 {
let mdf = match mdf.with_month(month) {
Some(mdf) => mdf,
None if month > 12 => continue,
None => panic!("failed to create Mdf with month {month}"),
};
if mdf.valid() {
assert_eq!(mdf.month(), month);
assert_eq!(mdf.day(), day);
}
}
for day in 0u32..=1024 {
let mdf = match mdf.with_day(day) {
Some(mdf) => mdf,
None if day > 31 => continue,
None => panic!("failed to create Mdf with month {month}"),
};
if mdf.valid() {
assert_eq!(mdf.month(), month);
assert_eq!(mdf.day(), day);
}
}
}
for &flags in NONLEAP_FLAGS.iter() {
check(flags, 1, 1);
check(flags, 1, 31);
check(flags, 2, 1);
check(flags, 2, 28);
check(flags, 2, 29);
check(flags, 12, 31);
}
for &flags in LEAP_FLAGS.iter() {
check(flags, 1, 1);
check(flags, 1, 31);
check(flags, 2, 1);
check(flags, 2, 29);
check(flags, 2, 30);
check(flags, 12, 31);
}
}
#[test]
fn test_mdf_new_range() {
let flags = YearFlags::from_year(2023);
assert!(Mdf::new(13, 1, flags).is_none());
assert!(Mdf::new(1, 32, flags).is_none());
}
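// Editor's sketch (added example, not from upstream): a small round trip through the accessors
// documented above. 2023 is a common year, so 1 March is ordinal 60, while 30 February can be
// represented but is rejected by the table lookup.
#[test]
fn test_mdf_ordinal_example() {
    let flags = YearFlags::from_year(2023);
    let mdf = Mdf::new(3, 1, flags).unwrap();
    assert_eq!(mdf.month(), 3);
    assert_eq!(mdf.day(), 1);
    assert_eq!(mdf.ordinal(), Some(60));
    assert_eq!(Mdf::new(2, 30, flags).unwrap().ordinal(), None);
}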
}

View File

@@ -0,0 +1,233 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! ISO 8601 week.
use core::fmt;
use super::internals::YearFlags;
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
use rkyv::{Archive, Deserialize, Serialize};
/// ISO 8601 week.
///
/// This type, combined with [`Weekday`](../enum.Weekday.html),
/// constitutes the ISO 8601 [week date](./struct.NaiveDate.html#week-date).
/// One can retrieve this type from the existing [`Datelike`](../trait.Datelike.html) types
/// via the [`Datelike::iso_week`](../trait.Datelike.html#tymethod.iso_week) method.
#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Hash)]
#[cfg_attr(
any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq, PartialOrd)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
pub struct IsoWeek {
// Note that this allows for a larger year range than `NaiveDate`.
// This is crucial because we have an edge case for the first and last week supported,
// whose year number might not match the calendar year number.
ywf: i32, // (year << 10) | (week << 4) | flag
}
impl IsoWeek {
/// Returns the corresponding `IsoWeek` from the year and the `Of` internal value.
//
// Internal use only. We don't expose the public constructor for `IsoWeek` for now
// because the year range for the week date and the calendar date do not match, and
// it is confusing to have a date that is out of range in one and not in another.
// Currently we sidestep this issue by making `IsoWeek` fully dependent on `Datelike`.
pub(super) fn from_yof(year: i32, ordinal: u32, year_flags: YearFlags) -> Self {
let rawweek = (ordinal + year_flags.isoweek_delta()) / 7;
let (year, week) = if rawweek < 1 {
// previous year
let prevlastweek = YearFlags::from_year(year - 1).nisoweeks();
(year - 1, prevlastweek)
} else {
let lastweek = year_flags.nisoweeks();
if rawweek > lastweek {
// next year
(year + 1, 1)
} else {
(year, rawweek)
}
};
let flags = YearFlags::from_year(year);
IsoWeek { ywf: (year << 10) | (week << 4) as i32 | i32::from(flags.0) }
}
/// Returns the year number for this ISO week.
///
/// # Example
///
/// ```
/// use chrono::{Datelike, NaiveDate, Weekday};
///
/// let d = NaiveDate::from_isoywd_opt(2015, 1, Weekday::Mon).unwrap();
/// assert_eq!(d.iso_week().year(), 2015);
/// ```
///
/// This year number might not match the calendar year number.
/// Continuing the example...
///
/// ```
/// # use chrono::{NaiveDate, Datelike, Weekday};
/// # let d = NaiveDate::from_isoywd_opt(2015, 1, Weekday::Mon).unwrap();
/// assert_eq!(d.year(), 2014);
/// assert_eq!(d, NaiveDate::from_ymd_opt(2014, 12, 29).unwrap());
/// ```
#[inline]
pub const fn year(&self) -> i32 {
self.ywf >> 10
}
/// Returns the ISO week number starting from 1.
///
/// The return value ranges from 1 to 53. (The number of the last week varies from year to year.)
///
/// # Example
///
/// ```
/// use chrono::{Datelike, NaiveDate, Weekday};
///
/// let d = NaiveDate::from_isoywd_opt(2015, 15, Weekday::Mon).unwrap();
/// assert_eq!(d.iso_week().week(), 15);
/// ```
#[inline]
pub const fn week(&self) -> u32 {
((self.ywf >> 4) & 0x3f) as u32
}
/// Returns the ISO week number starting from 0.
///
/// The return value ranges from 0 to 52. (The number of the last week varies from year to year.)
///
/// # Example
///
/// ```
/// use chrono::{Datelike, NaiveDate, Weekday};
///
/// let d = NaiveDate::from_isoywd_opt(2015, 15, Weekday::Mon).unwrap();
/// assert_eq!(d.iso_week().week0(), 14);
/// ```
#[inline]
pub const fn week0(&self) -> u32 {
((self.ywf >> 4) & 0x3f) as u32 - 1
}
}
/// The `Debug` output of the ISO week `w` is the same as
/// [`d.format("%G-W%V")`](../format/strftime/index.html)
/// where `d` is any `NaiveDate` value in that week.
///
/// # Example
///
/// ```
/// use chrono::{Datelike, NaiveDate};
///
/// assert_eq!(
/// format!("{:?}", NaiveDate::from_ymd_opt(2015, 9, 5).unwrap().iso_week()),
/// "2015-W36"
/// );
/// assert_eq!(format!("{:?}", NaiveDate::from_ymd_opt(0, 1, 3).unwrap().iso_week()), "0000-W01");
/// assert_eq!(
/// format!("{:?}", NaiveDate::from_ymd_opt(9999, 12, 31).unwrap().iso_week()),
/// "9999-W52"
/// );
/// ```
///
/// ISO 8601 requires an explicit sign for years before 1 BCE or after 9999 CE.
///
/// ```
/// # use chrono::{NaiveDate, Datelike};
/// assert_eq!(format!("{:?}", NaiveDate::from_ymd_opt(0, 1, 2).unwrap().iso_week()), "-0001-W52");
/// assert_eq!(
/// format!("{:?}", NaiveDate::from_ymd_opt(10000, 12, 31).unwrap().iso_week()),
/// "+10000-W52"
/// );
/// ```
impl fmt::Debug for IsoWeek {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let year = self.year();
let week = self.week();
if (0..=9999).contains(&year) {
write!(f, "{year:04}-W{week:02}")
} else {
// ISO 8601 requires the explicit sign for out-of-range years
write!(f, "{year:+05}-W{week:02}")
}
}
}
#[cfg(test)]
mod tests {
#[cfg(feature = "rkyv-validation")]
use super::IsoWeek;
use crate::Datelike;
use crate::naive::date::{self, NaiveDate};
#[test]
fn test_iso_week_extremes() {
let minweek = NaiveDate::MIN.iso_week();
let maxweek = NaiveDate::MAX.iso_week();
assert_eq!(minweek.year(), date::MIN_YEAR);
assert_eq!(minweek.week(), 1);
assert_eq!(minweek.week0(), 0);
#[cfg(feature = "alloc")]
assert_eq!(format!("{:?}", minweek), NaiveDate::MIN.format("%G-W%V").to_string());
assert_eq!(maxweek.year(), date::MAX_YEAR + 1);
assert_eq!(maxweek.week(), 1);
assert_eq!(maxweek.week0(), 0);
#[cfg(feature = "alloc")]
assert_eq!(format!("{:?}", maxweek), NaiveDate::MAX.format("%G-W%V").to_string());
}
#[test]
fn test_iso_week_equivalence_for_first_week() {
let monday = NaiveDate::from_ymd_opt(2024, 12, 30).unwrap();
let friday = NaiveDate::from_ymd_opt(2025, 1, 3).unwrap();
assert_eq!(monday.iso_week(), friday.iso_week());
}
#[test]
fn test_iso_week_equivalence_for_last_week() {
let monday = NaiveDate::from_ymd_opt(2026, 12, 28).unwrap();
let friday = NaiveDate::from_ymd_opt(2027, 1, 1).unwrap();
assert_eq!(monday.iso_week(), friday.iso_week());
}
#[test]
fn test_iso_week_ordering_for_first_week() {
let monday = NaiveDate::from_ymd_opt(2024, 12, 30).unwrap();
let friday = NaiveDate::from_ymd_opt(2025, 1, 3).unwrap();
assert!(monday.iso_week() >= friday.iso_week());
assert!(monday.iso_week() <= friday.iso_week());
}
#[test]
fn test_iso_week_ordering_for_last_week() {
let monday = NaiveDate::from_ymd_opt(2026, 12, 28).unwrap();
let friday = NaiveDate::from_ymd_opt(2027, 1, 1).unwrap();
assert!(monday.iso_week() >= friday.iso_week());
assert!(monday.iso_week() <= friday.iso_week());
}
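// Editor's sketch (added example, not from upstream): mirrors the doc examples above. The ISO
// week-year of 2014-12-29 is 2015 even though its calendar year is 2014.
#[test]
fn test_iso_week_year_mismatch_example() {
    let d = NaiveDate::from_ymd_opt(2014, 12, 29).unwrap();
    assert_eq!(d.year(), 2014);
    assert_eq!(d.iso_week().year(), 2015);
    assert_eq!(d.iso_week().week(), 1);
    assert_eq!(d.iso_week().week0(), 0);
}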
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let minweek = NaiveDate::MIN.iso_week();
let bytes = rkyv::to_bytes::<_, 4>(&minweek).unwrap();
assert_eq!(rkyv::from_bytes::<IsoWeek>(&bytes).unwrap(), minweek);
let maxweek = NaiveDate::MAX.iso_week();
let bytes = rkyv::to_bytes::<_, 4>(&maxweek).unwrap();
assert_eq!(rkyv::from_bytes::<IsoWeek>(&bytes).unwrap(), maxweek);
}
}

View File

@@ -0,0 +1,335 @@
//! Date and time types unconcerned with timezones.
//!
//! They are primarily building blocks for other types
//! (e.g. [`TimeZone`](../offset/trait.TimeZone.html)),
//! but can be also used for the simpler date and time handling.
use core::hash::{Hash, Hasher};
use core::ops::RangeInclusive;
use crate::Weekday;
use crate::expect;
pub(crate) mod date;
pub(crate) mod datetime;
mod internals;
pub(crate) mod isoweek;
pub(crate) mod time;
#[allow(deprecated)]
pub use self::date::{MAX_DATE, MIN_DATE};
pub use self::date::{NaiveDate, NaiveDateDaysIterator, NaiveDateWeeksIterator};
#[allow(deprecated)]
pub use self::datetime::{MAX_DATETIME, MIN_DATETIME, NaiveDateTime};
pub use self::isoweek::IsoWeek;
pub use self::time::NaiveTime;
#[cfg(feature = "__internal_bench")]
#[doc(hidden)]
pub use self::internals::YearFlags as __BenchYearFlags;
/// A week represented by a [`NaiveDate`] and a [`Weekday`] which is the first
/// day of the week.
#[derive(Clone, Copy, Debug, Eq)]
pub struct NaiveWeek {
date: NaiveDate,
start: Weekday,
}
impl NaiveWeek {
/// Create a new `NaiveWeek`
pub(crate) const fn new(date: NaiveDate, start: Weekday) -> Self {
Self { date, start }
}
/// Returns a date representing the first day of the week.
///
/// # Panics
///
/// Panics if the first day of the week happens to fall just out of range of `NaiveDate`
/// (more than ca. 262,000 years away from common era).
///
/// # Examples
///
/// ```
/// use chrono::{NaiveDate, Weekday};
///
/// let date = NaiveDate::from_ymd_opt(2022, 4, 18).unwrap();
/// let week = date.week(Weekday::Mon);
/// assert!(week.first_day() <= date);
/// ```
#[inline]
#[must_use]
pub const fn first_day(&self) -> NaiveDate {
expect(self.checked_first_day(), "first weekday out of range for `NaiveDate`")
}
/// Returns a date representing the first day of the week or
/// `None` if the date is out of `NaiveDate`'s range
/// (more than ca. 262,000 years away from common era).
///
/// # Examples
///
/// ```
/// use chrono::{NaiveDate, Weekday};
///
/// let date = NaiveDate::MIN;
/// let week = date.week(Weekday::Mon);
/// if let Some(first_day) = week.checked_first_day() {
/// assert!(first_day == date);
/// } else {
/// // error handling code
/// return;
/// };
/// ```
#[inline]
#[must_use]
pub const fn checked_first_day(&self) -> Option<NaiveDate> {
let start = self.start.num_days_from_monday() as i32;
let ref_day = self.date.weekday().num_days_from_monday() as i32;
// Calculate the number of days to subtract from `self.date`.
// Do not construct an intermediate date beyond `self.date`, because that may be out of
// range if `date` is close to `NaiveDate::MAX`.
let days = start - ref_day - if start > ref_day { 7 } else { 0 };
self.date.add_days(days)
}
/// Returns a date representing the last day of the week.
///
/// # Panics
///
/// Panics if the last day of the week happens to fall just out of range of `NaiveDate`
/// (more than ca. 262,000 years away from common era).
///
/// # Examples
///
/// ```
/// use chrono::{NaiveDate, Weekday};
///
/// let date = NaiveDate::from_ymd_opt(2022, 4, 18).unwrap();
/// let week = date.week(Weekday::Mon);
/// assert!(week.last_day() >= date);
/// ```
#[inline]
#[must_use]
pub const fn last_day(&self) -> NaiveDate {
expect(self.checked_last_day(), "last weekday out of range for `NaiveDate`")
}
/// Returns a date representing the last day of the week or
/// `None` if the date is out of `NaiveDate`'s range
/// (more than ca. 262,000 years away from common era).
///
/// # Examples
///
/// ```
/// use chrono::{NaiveDate, Weekday};
///
/// let date = NaiveDate::MAX;
/// let week = date.week(Weekday::Mon);
/// if let Some(last_day) = week.checked_last_day() {
/// assert!(last_day == date);
/// } else {
/// // error handling code
/// return;
/// };
/// ```
#[inline]
#[must_use]
pub const fn checked_last_day(&self) -> Option<NaiveDate> {
let end = self.start.pred().num_days_from_monday() as i32;
let ref_day = self.date.weekday().num_days_from_monday() as i32;
// Calculate the number of days to add to `self.date`.
// Do not construct an intermediate date before `self.date` (like with `first_day()`),
// because that may be out of range if `date` is close to `NaiveDate::MIN`.
let days = end - ref_day + if end < ref_day { 7 } else { 0 };
self.date.add_days(days)
}
/// Returns a [`RangeInclusive<T>`] representing the whole week bounded by
/// [first_day](NaiveWeek::first_day) and [last_day](NaiveWeek::last_day) functions.
///
/// # Panics
///
/// Panics if either the first or last day of the week happens to fall just out of range of
/// `NaiveDate` (more than ca. 262,000 years away from common era).
///
/// # Examples
///
/// ```
/// use chrono::{NaiveDate, Weekday};
///
/// let date = NaiveDate::from_ymd_opt(2022, 4, 18).unwrap();
/// let week = date.week(Weekday::Mon);
/// let days = week.days();
/// assert!(days.contains(&date));
/// ```
#[inline]
#[must_use]
pub const fn days(&self) -> RangeInclusive<NaiveDate> {
// `expect` doesn't work because `RangeInclusive` is not `Copy`
match self.checked_days() {
Some(val) => val,
None => panic!("{}", "first or last weekday is out of range for `NaiveDate`"),
}
}
/// Returns an [`Option<RangeInclusive<T>>`] representing the whole week bounded by
/// [checked_first_day](NaiveWeek::checked_first_day) and
/// [checked_last_day](NaiveWeek::checked_last_day) functions.
///
/// Returns `None` if either of the boundaries is out of `NaiveDate`'s range
/// (more than ca. 262,000 years away from common era).
///
/// # Examples
///
/// ```
/// use chrono::{NaiveDate, Weekday};
///
/// let date = NaiveDate::MAX;
/// let week = date.week(Weekday::Mon);
/// let _days = match week.checked_days() {
/// Some(d) => d,
/// None => {
/// // error handling code
/// return;
/// }
/// };
/// ```
#[inline]
#[must_use]
pub const fn checked_days(&self) -> Option<RangeInclusive<NaiveDate>> {
match (self.checked_first_day(), self.checked_last_day()) {
(Some(first), Some(last)) => Some(first..=last),
(_, _) => None,
}
}
}
impl PartialEq for NaiveWeek {
fn eq(&self, other: &Self) -> bool {
self.first_day() == other.first_day()
}
}
impl Hash for NaiveWeek {
fn hash<H: Hasher>(&self, state: &mut H) {
self.first_day().hash(state);
}
}
/// A duration in calendar days.
///
/// This is useful because, when using `TimeDelta`, adding `TimeDelta::days(1)` may not increment
/// the day value as expected, since a `TimeDelta` is a fixed number of seconds. The difference
/// only matters for `DateTime<TimeZone>` data types; in other cases `TimeDelta::days(n)` and
/// `Days::new(n)` are equivalent.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)]
pub struct Days(pub(crate) u64);
impl Days {
/// Construct a new `Days` from a number of days
pub const fn new(num: u64) -> Self {
Self(num)
}
}
/// Serialization/Deserialization of `NaiveDateTime` in alternate formats
///
/// The various modules in here are intended to be used with serde's [`with` annotation] to
/// serialize as something other than the default ISO 8601 format.
///
/// [`with` annotation]: https://serde.rs/field-attrs.html#with
#[cfg(feature = "serde")]
pub mod serde {
pub use super::datetime::serde::*;
}
#[cfg(test)]
mod test {
use crate::{NaiveDate, NaiveWeek, Weekday};
use std::hash::{DefaultHasher, Hash, Hasher};
#[test]
fn test_naiveweek() {
let date = NaiveDate::from_ymd_opt(2022, 5, 18).unwrap();
let asserts = [
(Weekday::Mon, "Mon 2022-05-16", "Sun 2022-05-22"),
(Weekday::Tue, "Tue 2022-05-17", "Mon 2022-05-23"),
(Weekday::Wed, "Wed 2022-05-18", "Tue 2022-05-24"),
(Weekday::Thu, "Thu 2022-05-12", "Wed 2022-05-18"),
(Weekday::Fri, "Fri 2022-05-13", "Thu 2022-05-19"),
(Weekday::Sat, "Sat 2022-05-14", "Fri 2022-05-20"),
(Weekday::Sun, "Sun 2022-05-15", "Sat 2022-05-21"),
];
for (start, first_day, last_day) in asserts {
let week = date.week(start);
let days = week.days();
assert_eq!(Ok(week.first_day()), NaiveDate::parse_from_str(first_day, "%a %Y-%m-%d"));
assert_eq!(Ok(week.last_day()), NaiveDate::parse_from_str(last_day, "%a %Y-%m-%d"));
assert!(days.contains(&date));
}
}
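// Editor's sketch (added example, not from upstream): the day arithmetic described in
// `checked_first_day`/`checked_last_day`, spelled out for one date. 2022-05-18 is a Wednesday,
// so its Monday-based week runs from 2022-05-16 to 2022-05-22.
#[test]
fn test_naiveweek_checked_bounds_example() {
    let date = NaiveDate::from_ymd_opt(2022, 5, 18).unwrap();
    let week = date.week(Weekday::Mon);
    assert_eq!(week.checked_first_day(), NaiveDate::from_ymd_opt(2022, 5, 16));
    assert_eq!(week.checked_last_day(), NaiveDate::from_ymd_opt(2022, 5, 22));
}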
#[test]
fn test_naiveweek_min_max() {
let date_max = NaiveDate::MAX;
assert!(date_max.week(Weekday::Mon).first_day() <= date_max);
let date_min = NaiveDate::MIN;
assert!(date_min.week(Weekday::Mon).last_day() >= date_min);
}
#[test]
fn test_naiveweek_checked_no_panic() {
let date_max = NaiveDate::MAX;
if let Some(last) = date_max.week(Weekday::Mon).checked_last_day() {
assert!(last == date_max);
}
let date_min = NaiveDate::MIN;
if let Some(first) = date_min.week(Weekday::Mon).checked_first_day() {
assert!(first == date_min);
}
let _ = date_min.week(Weekday::Mon).checked_days();
let _ = date_max.week(Weekday::Mon).checked_days();
}
#[test]
fn test_naiveweek_eq() {
let a =
NaiveWeek { date: NaiveDate::from_ymd_opt(2025, 4, 3).unwrap(), start: Weekday::Mon };
let b =
NaiveWeek { date: NaiveDate::from_ymd_opt(2025, 4, 4).unwrap(), start: Weekday::Mon };
assert_eq!(a, b);
let c =
NaiveWeek { date: NaiveDate::from_ymd_opt(2025, 4, 3).unwrap(), start: Weekday::Sun };
assert_ne!(a, c);
assert_ne!(b, c);
}
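// Editor's sketch (added example, not from upstream): `Days` counts calendar days. This assumes
// the `Add<Days>` impl for `NaiveDate` provided in the date module, which is not shown in this
// file.
#[test]
fn test_days_calendar_example() {
    use super::Days;
    let d = NaiveDate::from_ymd_opt(2024, 2, 28).unwrap();
    assert_eq!(d + Days::new(1), NaiveDate::from_ymd_opt(2024, 2, 29).unwrap());
    assert_eq!(d + Days::new(2), NaiveDate::from_ymd_opt(2024, 3, 1).unwrap());
}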
#[test]
fn test_naiveweek_hash() {
let a =
NaiveWeek { date: NaiveDate::from_ymd_opt(2025, 4, 3).unwrap(), start: Weekday::Mon };
let b =
NaiveWeek { date: NaiveDate::from_ymd_opt(2025, 4, 4).unwrap(), start: Weekday::Mon };
let c =
NaiveWeek { date: NaiveDate::from_ymd_opt(2025, 4, 3).unwrap(), start: Weekday::Sun };
let mut hasher = DefaultHasher::default();
a.hash(&mut hasher);
let a_hash = hasher.finish();
hasher = DefaultHasher::default();
b.hash(&mut hasher);
let b_hash = hasher.finish();
hasher = DefaultHasher::default();
c.hash(&mut hasher);
let c_hash = hasher.finish();
assert_eq!(a_hash, b_hash);
assert_ne!(b_hash, c_hash);
assert_ne!(a_hash, c_hash);
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,143 @@
use super::NaiveTime;
use core::fmt;
use serde::{de, ser};
// TODO not very optimized for space (binary formats would want something better)
// TODO round-trip for general leap seconds (not just those with second = 60)
impl ser::Serialize for NaiveTime {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
serializer.collect_str(&self)
}
}
struct NaiveTimeVisitor;
impl de::Visitor<'_> for NaiveTimeVisitor {
type Value = NaiveTime;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a formatted time string")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
value.parse().map_err(E::custom)
}
}
impl<'de> de::Deserialize<'de> for NaiveTime {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
deserializer.deserialize_str(NaiveTimeVisitor)
}
}
#[cfg(test)]
mod tests {
use crate::NaiveTime;
#[test]
fn test_serde_serialize() {
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_opt(0, 0, 0).unwrap()).ok(),
Some(r#""00:00:00""#.into())
);
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_milli_opt(0, 0, 0, 950).unwrap()).ok(),
Some(r#""00:00:00.950""#.into())
);
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_milli_opt(0, 0, 59, 1_000).unwrap()).ok(),
Some(r#""00:00:60""#.into())
);
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_opt(0, 1, 2).unwrap()).ok(),
Some(r#""00:01:02""#.into())
);
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_nano_opt(3, 5, 7, 98765432).unwrap()).ok(),
Some(r#""03:05:07.098765432""#.into())
);
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_opt(7, 8, 9).unwrap()).ok(),
Some(r#""07:08:09""#.into())
);
assert_eq!(
serde_json::to_string(&NaiveTime::from_hms_micro_opt(12, 34, 56, 789).unwrap()).ok(),
Some(r#""12:34:56.000789""#.into())
);
let leap = NaiveTime::from_hms_nano_opt(23, 59, 59, 1_999_999_999).unwrap();
assert_eq!(serde_json::to_string(&leap).ok(), Some(r#""23:59:60.999999999""#.into()));
}
#[test]
fn test_serde_deserialize() {
let from_str = serde_json::from_str::<NaiveTime>;
assert_eq!(from_str(r#""00:00:00""#).ok(), Some(NaiveTime::from_hms_opt(0, 0, 0).unwrap()));
assert_eq!(from_str(r#""0:0:0""#).ok(), Some(NaiveTime::from_hms_opt(0, 0, 0).unwrap()));
assert_eq!(
from_str(r#""00:00:00.950""#).ok(),
Some(NaiveTime::from_hms_milli_opt(0, 0, 0, 950).unwrap())
);
assert_eq!(
from_str(r#""0:0:0.95""#).ok(),
Some(NaiveTime::from_hms_milli_opt(0, 0, 0, 950).unwrap())
);
assert_eq!(
from_str(r#""00:00:60""#).ok(),
Some(NaiveTime::from_hms_milli_opt(0, 0, 59, 1_000).unwrap())
);
assert_eq!(from_str(r#""00:01:02""#).ok(), Some(NaiveTime::from_hms_opt(0, 1, 2).unwrap()));
assert_eq!(
from_str(r#""03:05:07.098765432""#).ok(),
Some(NaiveTime::from_hms_nano_opt(3, 5, 7, 98765432).unwrap())
);
assert_eq!(from_str(r#""07:08:09""#).ok(), Some(NaiveTime::from_hms_opt(7, 8, 9).unwrap()));
assert_eq!(
from_str(r#""12:34:56.000789""#).ok(),
Some(NaiveTime::from_hms_micro_opt(12, 34, 56, 789).unwrap())
);
assert_eq!(
from_str(r#""23:59:60.999999999""#).ok(),
Some(NaiveTime::from_hms_nano_opt(23, 59, 59, 1_999_999_999).unwrap())
);
assert_eq!(
from_str(r#""23:59:60.9999999999997""#).ok(), // excess digits are ignored
Some(NaiveTime::from_hms_nano_opt(23, 59, 59, 1_999_999_999).unwrap())
);
// bad formats
assert!(from_str(r#""""#).is_err());
assert!(from_str(r#""000000""#).is_err());
assert!(from_str(r#""00:00:61""#).is_err());
assert!(from_str(r#""00:60:00""#).is_err());
assert!(from_str(r#""24:00:00""#).is_err());
assert!(from_str(r#""23:59:59,1""#).is_err());
assert!(from_str(r#""012:34:56""#).is_err());
assert!(from_str(r#""hh:mm:ss""#).is_err());
assert!(from_str(r#"0"#).is_err());
assert!(from_str(r#"86399"#).is_err());
assert!(from_str(r#"{}"#).is_err());
}
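// Editor's sketch (added example, not from upstream): a full string round trip through the
// `Serialize`/`Deserialize` impls above, including the leap-second representation.
#[test]
fn test_serde_roundtrip_example() {
    let t = NaiveTime::from_hms_milli_opt(23, 59, 59, 1_000).unwrap(); // rendered as 23:59:60
    let json = serde_json::to_string(&t).unwrap();
    assert_eq!(json, r#""23:59:60""#);
    assert_eq!(serde_json::from_str::<NaiveTime>(&json).unwrap(), t);
}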
#[test]
fn test_serde_bincode() {
// Bincode is relevant to test separately from JSON because
// it is not self-describing.
use bincode::{deserialize, serialize};
let t = NaiveTime::from_hms_nano_opt(3, 5, 7, 98765432).unwrap();
let encoded = serialize(&t).unwrap();
let decoded: NaiveTime = deserialize(&encoded).unwrap();
assert_eq!(t, decoded);
}
}

View File

@@ -0,0 +1,393 @@
use super::NaiveTime;
use crate::{FixedOffset, TimeDelta, Timelike};
#[test]
fn test_time_from_hms_milli() {
assert_eq!(
NaiveTime::from_hms_milli_opt(3, 5, 7, 0),
Some(NaiveTime::from_hms_nano_opt(3, 5, 7, 0).unwrap())
);
assert_eq!(
NaiveTime::from_hms_milli_opt(3, 5, 7, 777),
Some(NaiveTime::from_hms_nano_opt(3, 5, 7, 777_000_000).unwrap())
);
assert_eq!(
NaiveTime::from_hms_milli_opt(3, 5, 59, 1_999),
Some(NaiveTime::from_hms_nano_opt(3, 5, 59, 1_999_000_000).unwrap())
);
assert_eq!(NaiveTime::from_hms_milli_opt(3, 5, 59, 2_000), None);
assert_eq!(NaiveTime::from_hms_milli_opt(3, 5, 59, 5_000), None); // overflow check
assert_eq!(NaiveTime::from_hms_milli_opt(3, 5, 59, u32::MAX), None);
}
#[test]
fn test_time_from_hms_micro() {
assert_eq!(
NaiveTime::from_hms_micro_opt(3, 5, 7, 0),
Some(NaiveTime::from_hms_nano_opt(3, 5, 7, 0).unwrap())
);
assert_eq!(
NaiveTime::from_hms_micro_opt(3, 5, 7, 333),
Some(NaiveTime::from_hms_nano_opt(3, 5, 7, 333_000).unwrap())
);
assert_eq!(
NaiveTime::from_hms_micro_opt(3, 5, 7, 777_777),
Some(NaiveTime::from_hms_nano_opt(3, 5, 7, 777_777_000).unwrap())
);
assert_eq!(
NaiveTime::from_hms_micro_opt(3, 5, 59, 1_999_999),
Some(NaiveTime::from_hms_nano_opt(3, 5, 59, 1_999_999_000).unwrap())
);
assert_eq!(NaiveTime::from_hms_micro_opt(3, 5, 59, 2_000_000), None);
assert_eq!(NaiveTime::from_hms_micro_opt(3, 5, 59, 5_000_000), None); // overflow check
assert_eq!(NaiveTime::from_hms_micro_opt(3, 5, 59, u32::MAX), None);
}
#[test]
fn test_time_hms() {
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().hour(), 3);
assert_eq!(
NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_hour(0),
Some(NaiveTime::from_hms_opt(0, 5, 7).unwrap())
);
assert_eq!(
NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_hour(23),
Some(NaiveTime::from_hms_opt(23, 5, 7).unwrap())
);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_hour(24), None);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_hour(u32::MAX), None);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().minute(), 5);
assert_eq!(
NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_minute(0),
Some(NaiveTime::from_hms_opt(3, 0, 7).unwrap())
);
assert_eq!(
NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_minute(59),
Some(NaiveTime::from_hms_opt(3, 59, 7).unwrap())
);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_minute(60), None);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_minute(u32::MAX), None);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().second(), 7);
assert_eq!(
NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_second(0),
Some(NaiveTime::from_hms_opt(3, 5, 0).unwrap())
);
assert_eq!(
NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_second(59),
Some(NaiveTime::from_hms_opt(3, 5, 59).unwrap())
);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_second(60), None);
assert_eq!(NaiveTime::from_hms_opt(3, 5, 7).unwrap().with_second(u32::MAX), None);
}
#[test]
fn test_time_add() {
macro_rules! check {
($lhs:expr, $rhs:expr, $sum:expr) => {{
assert_eq!($lhs + $rhs, $sum);
//assert_eq!($rhs + $lhs, $sum);
}};
}
let hmsm = |h, m, s, ms| NaiveTime::from_hms_milli_opt(h, m, s, ms).unwrap();
check!(hmsm(3, 5, 59, 900), TimeDelta::zero(), hmsm(3, 5, 59, 900));
check!(hmsm(3, 5, 59, 900), TimeDelta::try_milliseconds(100).unwrap(), hmsm(3, 6, 0, 0));
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_milliseconds(-1800).unwrap(), hmsm(3, 5, 58, 500));
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_milliseconds(-800).unwrap(), hmsm(3, 5, 59, 500));
check!(
hmsm(3, 5, 59, 1_300),
TimeDelta::try_milliseconds(-100).unwrap(),
hmsm(3, 5, 59, 1_200)
);
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_milliseconds(100).unwrap(), hmsm(3, 5, 59, 1_400));
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_milliseconds(800).unwrap(), hmsm(3, 6, 0, 100));
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_milliseconds(1800).unwrap(), hmsm(3, 6, 1, 100));
check!(hmsm(3, 5, 59, 900), TimeDelta::try_seconds(86399).unwrap(), hmsm(3, 5, 58, 900)); // wraps around the day
check!(hmsm(3, 5, 59, 900), TimeDelta::try_seconds(-86399).unwrap(), hmsm(3, 6, 0, 900));
check!(hmsm(3, 5, 59, 900), TimeDelta::try_days(12345).unwrap(), hmsm(3, 5, 59, 900));
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_days(1).unwrap(), hmsm(3, 5, 59, 300));
check!(hmsm(3, 5, 59, 1_300), TimeDelta::try_days(-1).unwrap(), hmsm(3, 6, 0, 300));
// regression tests for #37
check!(hmsm(0, 0, 0, 0), TimeDelta::try_milliseconds(-990).unwrap(), hmsm(23, 59, 59, 10));
check!(hmsm(0, 0, 0, 0), TimeDelta::try_milliseconds(-9990).unwrap(), hmsm(23, 59, 50, 10));
}
#[test]
fn test_time_overflowing_add() {
let hmsm = |h, m, s, ms| NaiveTime::from_hms_milli_opt(h, m, s, ms).unwrap();
assert_eq!(
hmsm(3, 4, 5, 678).overflowing_add_signed(TimeDelta::try_hours(11).unwrap()),
(hmsm(14, 4, 5, 678), 0)
);
assert_eq!(
hmsm(3, 4, 5, 678).overflowing_add_signed(TimeDelta::try_hours(23).unwrap()),
(hmsm(2, 4, 5, 678), 86_400)
);
assert_eq!(
hmsm(3, 4, 5, 678).overflowing_add_signed(TimeDelta::try_hours(-7).unwrap()),
(hmsm(20, 4, 5, 678), -86_400)
);
// overflowing_add_signed with leap seconds may be counter-intuitive
assert_eq!(
hmsm(3, 4, 59, 1_678).overflowing_add_signed(TimeDelta::try_days(1).unwrap()),
(hmsm(3, 4, 59, 678), 86_400)
);
assert_eq!(
hmsm(3, 4, 59, 1_678).overflowing_add_signed(TimeDelta::try_days(-1).unwrap()),
(hmsm(3, 5, 0, 678), -86_400)
);
}
#[test]
fn test_time_addassignment() {
let hms = |h, m, s| NaiveTime::from_hms_opt(h, m, s).unwrap();
let mut time = hms(12, 12, 12);
time += TimeDelta::try_hours(10).unwrap();
assert_eq!(time, hms(22, 12, 12));
time += TimeDelta::try_hours(10).unwrap();
assert_eq!(time, hms(8, 12, 12));
}
#[test]
fn test_time_subassignment() {
let hms = |h, m, s| NaiveTime::from_hms_opt(h, m, s).unwrap();
let mut time = hms(12, 12, 12);
time -= TimeDelta::try_hours(10).unwrap();
assert_eq!(time, hms(2, 12, 12));
time -= TimeDelta::try_hours(10).unwrap();
assert_eq!(time, hms(16, 12, 12));
}
#[test]
fn test_time_sub() {
macro_rules! check {
($lhs:expr, $rhs:expr, $diff:expr) => {{
// `time1 - time2 = duration` is equivalent to `time2 - time1 = -duration`
assert_eq!($lhs.signed_duration_since($rhs), $diff);
assert_eq!($rhs.signed_duration_since($lhs), -$diff);
}};
}
let hmsm = |h, m, s, ms| NaiveTime::from_hms_milli_opt(h, m, s, ms).unwrap();
check!(hmsm(3, 5, 7, 900), hmsm(3, 5, 7, 900), TimeDelta::zero());
check!(hmsm(3, 5, 7, 900), hmsm(3, 5, 7, 600), TimeDelta::try_milliseconds(300).unwrap());
check!(hmsm(3, 5, 7, 200), hmsm(2, 4, 6, 200), TimeDelta::try_seconds(3600 + 60 + 1).unwrap());
check!(
hmsm(3, 5, 7, 200),
hmsm(2, 4, 6, 300),
TimeDelta::try_seconds(3600 + 60).unwrap() + TimeDelta::try_milliseconds(900).unwrap()
);
// treats the leap second as if it coincides with the prior non-leap second,
// as required by `time1 - time2 = duration` and `time2 - time1 = -duration` equivalence.
check!(hmsm(3, 6, 0, 200), hmsm(3, 5, 59, 1_800), TimeDelta::try_milliseconds(400).unwrap());
//check!(hmsm(3, 5, 7, 1_200), hmsm(3, 5, 6, 1_800), TimeDelta::try_milliseconds(1400).unwrap());
//check!(hmsm(3, 5, 7, 1_200), hmsm(3, 5, 6, 800), TimeDelta::try_milliseconds(1400).unwrap());
// additional equality: `time1 + duration = time2` is equivalent to
// `time2 - time1 = duration` IF AND ONLY IF `time2` represents a non-leap second.
assert_eq!(hmsm(3, 5, 6, 800) + TimeDelta::try_milliseconds(400).unwrap(), hmsm(3, 5, 7, 200));
//assert_eq!(hmsm(3, 5, 6, 1_800) + TimeDelta::try_milliseconds(400).unwrap(), hmsm(3, 5, 7, 200));
}
#[test]
fn test_core_duration_ops() {
use core::time::Duration;
let mut t = NaiveTime::from_hms_opt(11, 34, 23).unwrap();
let same = t + Duration::ZERO;
assert_eq!(t, same);
t += Duration::new(3600, 0);
assert_eq!(t, NaiveTime::from_hms_opt(12, 34, 23).unwrap());
t -= Duration::new(7200, 0);
assert_eq!(t, NaiveTime::from_hms_opt(10, 34, 23).unwrap());
}
#[test]
fn test_time_fmt() {
assert_eq!(
format!("{}", NaiveTime::from_hms_milli_opt(23, 59, 59, 999).unwrap()),
"23:59:59.999"
);
assert_eq!(
format!("{}", NaiveTime::from_hms_milli_opt(23, 59, 59, 1_000).unwrap()),
"23:59:60"
);
assert_eq!(
format!("{}", NaiveTime::from_hms_milli_opt(23, 59, 59, 1_001).unwrap()),
"23:59:60.001"
);
assert_eq!(
format!("{}", NaiveTime::from_hms_micro_opt(0, 0, 0, 43210).unwrap()),
"00:00:00.043210"
);
assert_eq!(
format!("{}", NaiveTime::from_hms_nano_opt(0, 0, 0, 6543210).unwrap()),
"00:00:00.006543210"
);
// the format specifier should have no effect on `NaiveTime`
assert_eq!(
format!("{:30}", NaiveTime::from_hms_milli_opt(3, 5, 7, 9).unwrap()),
"03:05:07.009"
);
}
#[test]
fn test_time_from_str() {
// valid cases
let valid = [
"0:0:0",
"0:0:0.0000000",
"0:0:0.0000003",
" 4 : 3 : 2.1 ",
" 09:08:07 ",
" 09:08 ",
" 9:8:07 ",
"01:02:03",
"4:3:2.1",
"9:8:7",
"09:8:7",
"9:08:7",
"9:8:07",
"09:08:7",
"09:8:07",
"09:08:7",
"9:08:07",
"09:08:07",
"9:8:07.123",
"9:08:7.123",
"09:8:7.123",
"09:08:7.123",
"9:08:07.123",
"09:8:07.123",
"09:08:07.123",
"09:08:07.123",
"09:08:07.1234",
"09:08:07.12345",
"09:08:07.123456",
"09:08:07.1234567",
"09:08:07.12345678",
"09:08:07.123456789",
"09:08:07.1234567891",
"09:08:07.12345678912",
"23:59:60.373929310237",
];
for &s in &valid {
eprintln!("test_time_parse_from_str valid {:?}", s);
let d = match s.parse::<NaiveTime>() {
Ok(d) => d,
Err(e) => panic!("parsing `{}` has failed: {}", s, e),
};
let s_ = format!("{:?}", d);
// `s` and `s_` may differ, but `s.parse()` and `s_.parse()` must be same
let d_ = match s_.parse::<NaiveTime>() {
Ok(d) => d,
Err(e) => {
panic!("`{}` is parsed into `{:?}`, but reparsing that has failed: {}", s, d, e)
}
};
assert!(
d == d_,
"`{}` is parsed into `{:?}`, but reparsed result \
`{:?}` does not match",
s,
d,
d_
);
}
// some invalid cases
// since `ParseErrorKind` is private, all we can do is check whether there was an error
let invalid = [
"", // empty
"x", // invalid
"15", // missing data
"15:8:", // trailing colon
"15:8:x", // invalid data
"15:8:9x", // invalid data
"23:59:61", // invalid second (out of bounds)
"23:54:35 GMT", // invalid (timezone non-sensical for NaiveTime)
"23:54:35 +0000", // invalid (timezone non-sensical for NaiveTime)
"1441497364.649", // valid datetime, not a NaiveTime
"+1441497364.649", // valid datetime, not a NaiveTime
"+1441497364", // valid datetime, not a NaiveTime
"001:02:03", // invalid hour
"01:002:03", // invalid minute
"01:02:003", // invalid second
"12:34:56.x", // invalid fraction
"12:34:56. 0", // invalid fraction format
"09:08:00000000007", // invalid second / invalid fraction format
];
for &s in &invalid {
eprintln!("test_time_parse_from_str invalid {:?}", s);
assert!(s.parse::<NaiveTime>().is_err());
}
}
#[test]
fn test_time_parse_from_str() {
let hms = |h, m, s| NaiveTime::from_hms_opt(h, m, s).unwrap();
assert_eq!(
NaiveTime::parse_from_str("2014-5-7T12:34:56+09:30", "%Y-%m-%dT%H:%M:%S%z"),
Ok(hms(12, 34, 56))
); // ignore date and offset
assert_eq!(NaiveTime::parse_from_str("PM 12:59", "%P %H:%M"), Ok(hms(12, 59, 0)));
assert_eq!(NaiveTime::parse_from_str("12:59 \n\t PM", "%H:%M \n\t %P"), Ok(hms(12, 59, 0)));
assert_eq!(NaiveTime::parse_from_str("\t\t12:59\tPM\t", "\t\t%H:%M\t%P\t"), Ok(hms(12, 59, 0)));
assert_eq!(
NaiveTime::parse_from_str("\t\t1259\t\tPM\t", "\t\t%H%M\t\t%P\t"),
Ok(hms(12, 59, 0))
);
assert!(NaiveTime::parse_from_str("12:59 PM", "%H:%M\t%P").is_ok());
assert!(NaiveTime::parse_from_str("\t\t12:59 PM\t", "\t\t%H:%M\t%P\t").is_ok());
assert!(NaiveTime::parse_from_str("12:59 PM", "%H:%M %P").is_ok());
assert!(NaiveTime::parse_from_str("12:3456", "%H:%M:%S").is_err());
}
#[test]
fn test_overflowing_offset() {
let hmsm = |h, m, s, n| NaiveTime::from_hms_milli_opt(h, m, s, n).unwrap();
let positive_offset = FixedOffset::east_opt(4 * 60 * 60).unwrap();
// regular time
let t = hmsm(5, 6, 7, 890);
assert_eq!(t.overflowing_add_offset(positive_offset), (hmsm(9, 6, 7, 890), 0));
assert_eq!(t.overflowing_sub_offset(positive_offset), (hmsm(1, 6, 7, 890), 0));
// leap second is preserved, and wraps to the next day
let t = hmsm(23, 59, 59, 1_000);
assert_eq!(t.overflowing_add_offset(positive_offset), (hmsm(3, 59, 59, 1_000), 1));
assert_eq!(t.overflowing_sub_offset(positive_offset), (hmsm(19, 59, 59, 1_000), 0));
// wrap to previous day
let t = hmsm(1, 2, 3, 456);
assert_eq!(t.overflowing_sub_offset(positive_offset), (hmsm(21, 2, 3, 456), -1));
// an odd offset
let negative_offset = FixedOffset::west_opt(((2 * 60) + 3) * 60 + 4).unwrap();
let t = hmsm(5, 6, 7, 890);
assert_eq!(t.overflowing_add_offset(negative_offset), (hmsm(3, 3, 3, 890), 0));
assert_eq!(t.overflowing_sub_offset(negative_offset), (hmsm(7, 9, 11, 890), 0));
assert_eq!(t.overflowing_add_offset(positive_offset).0, t + positive_offset);
assert_eq!(t.overflowing_sub_offset(positive_offset).0, t - positive_offset);
}
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let t_min = NaiveTime::MIN;
let bytes = rkyv::to_bytes::<_, 8>(&t_min).unwrap();
assert_eq!(rkyv::from_bytes::<NaiveTime>(&bytes).unwrap(), t_min);
let t_max = NaiveTime::MAX;
let bytes = rkyv::to_bytes::<_, 8>(&t_max).unwrap();
assert_eq!(rkyv::from_bytes::<NaiveTime>(&bytes).unwrap(), t_max);
}

View File

@@ -0,0 +1,236 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! The time zone which has a fixed offset from UTC.
use core::fmt;
use core::str::FromStr;
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
use rkyv::{Archive, Deserialize, Serialize};
use super::{MappedLocalTime, Offset, TimeZone};
use crate::format::{OUT_OF_RANGE, ParseError, scan};
use crate::naive::{NaiveDate, NaiveDateTime};
/// The time zone with fixed offset, from UTC-23:59:59 to UTC+23:59:59.
///
/// Using the [`TimeZone`](./trait.TimeZone.html) methods
/// on a `FixedOffset` struct is the preferred way to construct
/// `DateTime<FixedOffset>` instances. See the [`east_opt`](#method.east_opt) and
/// [`west_opt`](#method.west_opt) methods for examples.
#[derive(PartialEq, Eq, Hash, Copy, Clone)]
#[cfg_attr(
any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, Hash, Debug)))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
pub struct FixedOffset {
local_minus_utc: i32,
}
impl FixedOffset {
/// Makes a new `FixedOffset` for the Eastern Hemisphere with the given timezone difference.
/// A negative `secs` value means the Western Hemisphere.
///
/// Panics on out-of-bound `secs`.
#[deprecated(since = "0.4.23", note = "use `east_opt()` instead")]
#[must_use]
pub fn east(secs: i32) -> FixedOffset {
FixedOffset::east_opt(secs).expect("FixedOffset::east out of bounds")
}
/// Makes a new `FixedOffset` for the Eastern Hemisphere with the given timezone difference.
/// A negative `secs` value means the Western Hemisphere.
///
/// Returns `None` on out-of-bound `secs`.
///
/// # Example
///
/// ```
/// # #[cfg(feature = "alloc")] {
/// use chrono::{FixedOffset, TimeZone};
/// let hour = 3600;
/// let datetime =
/// FixedOffset::east_opt(5 * hour).unwrap().with_ymd_and_hms(2016, 11, 08, 0, 0, 0).unwrap();
/// assert_eq!(&datetime.to_rfc3339(), "2016-11-08T00:00:00+05:00")
/// # }
/// ```
#[must_use]
pub const fn east_opt(secs: i32) -> Option<FixedOffset> {
if -86_400 < secs && secs < 86_400 {
Some(FixedOffset { local_minus_utc: secs })
} else {
None
}
}
/// Makes a new `FixedOffset` for the Western Hemisphere with the given timezone difference.
/// A negative `secs` value means the Eastern Hemisphere.
///
/// Panics on out-of-bound `secs`.
#[deprecated(since = "0.4.23", note = "use `west_opt()` instead")]
#[must_use]
pub fn west(secs: i32) -> FixedOffset {
FixedOffset::west_opt(secs).expect("FixedOffset::west out of bounds")
}
/// Makes a new `FixedOffset` for the Western Hemisphere with the given timezone difference.
/// A negative `secs` value means the Eastern Hemisphere.
///
/// Returns `None` on out-of-bound `secs`.
///
/// # Example
///
/// ```
/// # #[cfg(feature = "alloc")] {
/// use chrono::{FixedOffset, TimeZone};
/// let hour = 3600;
/// let datetime =
/// FixedOffset::west_opt(5 * hour).unwrap().with_ymd_and_hms(2016, 11, 08, 0, 0, 0).unwrap();
/// assert_eq!(&datetime.to_rfc3339(), "2016-11-08T00:00:00-05:00")
/// # }
/// ```
#[must_use]
pub const fn west_opt(secs: i32) -> Option<FixedOffset> {
if -86_400 < secs && secs < 86_400 {
Some(FixedOffset { local_minus_utc: -secs })
} else {
None
}
}
/// Returns the number of seconds to add to convert from UTC to the local time.
#[inline]
pub const fn local_minus_utc(&self) -> i32 {
self.local_minus_utc
}
/// Returns the number of seconds to add to convert from the local time to UTC.
#[inline]
pub const fn utc_minus_local(&self) -> i32 {
-self.local_minus_utc
}
}
/// Parsing a `str` into a `FixedOffset` uses the format [`%z`](crate::format::strftime).
impl FromStr for FixedOffset {
type Err = ParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (_, offset) = scan::timezone_offset(s, scan::colon_or_space, false, false, true)?;
Self::east_opt(offset).ok_or(OUT_OF_RANGE)
}
}
impl TimeZone for FixedOffset {
type Offset = FixedOffset;
fn from_offset(offset: &FixedOffset) -> FixedOffset {
*offset
}
fn offset_from_local_date(&self, _local: &NaiveDate) -> MappedLocalTime<FixedOffset> {
MappedLocalTime::Single(*self)
}
fn offset_from_local_datetime(&self, _local: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
MappedLocalTime::Single(*self)
}
fn offset_from_utc_date(&self, _utc: &NaiveDate) -> FixedOffset {
*self
}
fn offset_from_utc_datetime(&self, _utc: &NaiveDateTime) -> FixedOffset {
*self
}
}
impl Offset for FixedOffset {
fn fix(&self) -> FixedOffset {
*self
}
}
impl fmt::Debug for FixedOffset {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let offset = self.local_minus_utc;
let (sign, offset) = if offset < 0 { ('-', -offset) } else { ('+', offset) };
let sec = offset.rem_euclid(60);
let mins = offset.div_euclid(60);
let min = mins.rem_euclid(60);
let hour = mins.div_euclid(60);
if sec == 0 {
write!(f, "{sign}{hour:02}:{min:02}")
} else {
write!(f, "{sign}{hour:02}:{min:02}:{sec:02}")
}
}
}
impl fmt::Display for FixedOffset {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self, f)
}
}
#[cfg(all(feature = "arbitrary", feature = "std"))]
impl arbitrary::Arbitrary<'_> for FixedOffset {
fn arbitrary(u: &mut arbitrary::Unstructured) -> arbitrary::Result<FixedOffset> {
let secs = u.int_in_range(-86_399..=86_399)?;
let fixed_offset = FixedOffset::east_opt(secs)
.expect("Could not generate a valid chrono::FixedOffset. It looks like implementation of Arbitrary for FixedOffset is erroneous.");
Ok(fixed_offset)
}
}
#[cfg(test)]
mod tests {
use super::FixedOffset;
use crate::offset::TimeZone;
use std::str::FromStr;
#[test]
fn test_date_extreme_offset() {
// starting from 0.3 we don't have an offset exceeding one day.
// this makes everything easier!
let offset = FixedOffset::east_opt(86399).unwrap();
assert_eq!(
format!("{:?}", offset.with_ymd_and_hms(2012, 2, 29, 5, 6, 7).unwrap()),
"2012-02-29T05:06:07+23:59:59"
);
let offset = FixedOffset::east_opt(-86399).unwrap();
assert_eq!(
format!("{:?}", offset.with_ymd_and_hms(2012, 2, 29, 5, 6, 7).unwrap()),
"2012-02-29T05:06:07-23:59:59"
);
let offset = FixedOffset::west_opt(86399).unwrap();
assert_eq!(
format!("{:?}", offset.with_ymd_and_hms(2012, 3, 4, 5, 6, 7).unwrap()),
"2012-03-04T05:06:07-23:59:59"
);
let offset = FixedOffset::west_opt(-86399).unwrap();
assert_eq!(
format!("{:?}", offset.with_ymd_and_hms(2012, 3, 4, 5, 6, 7).unwrap()),
"2012-03-04T05:06:07+23:59:59"
);
}
#[test]
fn test_parse_offset() {
let offset = FixedOffset::from_str("-0500").unwrap();
assert_eq!(offset.local_minus_utc, -5 * 3600);
let offset = FixedOffset::from_str("-08:00").unwrap();
assert_eq!(offset.local_minus_utc, -8 * 3600);
let offset = FixedOffset::from_str("+06:30").unwrap();
assert_eq!(offset.local_minus_utc, (6 * 3600) + 1800);
}
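// Editor's sketch (added example, not from upstream): the sign/hour/minute/second split used by
// the `Debug` (and `Display`) impl above, checked on a half-hour offset and on one that carries
// a seconds component.
#[test]
fn test_offset_debug_format_example() {
    let offset = FixedOffset::east_opt(5 * 3600 + 1800).unwrap();
    assert_eq!(format!("{:?}", offset), "+05:30");
    let offset = FixedOffset::west_opt(3600 + 90).unwrap();
    assert_eq!(format!("{:?}", offset), "-01:01:30");
}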
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let offset = FixedOffset::from_str("-0500").unwrap();
let bytes = rkyv::to_bytes::<_, 4>(&offset).unwrap();
assert_eq!(rkyv::from_bytes::<FixedOffset>(&bytes).unwrap(), offset);
}
}

View File

@@ -0,0 +1,544 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! The local (system) time zone.
#[cfg(windows)]
use std::cmp::Ordering;
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
use rkyv::{Archive, Deserialize, Serialize};
use super::fixed::FixedOffset;
use super::{MappedLocalTime, TimeZone};
#[allow(deprecated)]
use crate::Date;
use crate::naive::{NaiveDate, NaiveDateTime, NaiveTime};
use crate::{DateTime, Utc};
#[cfg(unix)]
#[path = "unix.rs"]
mod inner;
#[cfg(windows)]
#[path = "windows.rs"]
mod inner;
#[cfg(all(windows, feature = "clock"))]
#[allow(unreachable_pub)]
mod win_bindings;
#[cfg(all(any(target_os = "android", target_env = "ohos", test), feature = "clock"))]
mod tz_data;
#[cfg(all(
not(unix),
not(windows),
not(all(
target_arch = "wasm32",
feature = "wasmbind",
not(any(target_os = "emscripten", target_os = "wasi"))
))
))]
mod inner {
use crate::{FixedOffset, MappedLocalTime, NaiveDateTime};
pub(super) fn offset_from_utc_datetime(
_utc_time: &NaiveDateTime,
) -> MappedLocalTime<FixedOffset> {
MappedLocalTime::Single(FixedOffset::east_opt(0).unwrap())
}
pub(super) fn offset_from_local_datetime(
_local_time: &NaiveDateTime,
) -> MappedLocalTime<FixedOffset> {
MappedLocalTime::Single(FixedOffset::east_opt(0).unwrap())
}
}
#[cfg(all(
target_arch = "wasm32",
feature = "wasmbind",
not(any(target_os = "emscripten", target_os = "wasi", target_os = "linux"))
))]
mod inner {
use crate::{Datelike, FixedOffset, MappedLocalTime, NaiveDateTime, Timelike};
pub(super) fn offset_from_utc_datetime(utc: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
let offset = js_sys::Date::from(utc.and_utc()).get_timezone_offset();
MappedLocalTime::Single(FixedOffset::west_opt((offset as i32) * 60).unwrap())
}
pub(super) fn offset_from_local_datetime(
local: &NaiveDateTime,
) -> MappedLocalTime<FixedOffset> {
let mut year = local.year();
if year < 100 {
// The API in `js_sys` does not let us create a `Date` with negative years.
// And values for years from `0` to `99` map to the years `1900` to `1999`.
// Shift the value by a multiple of 400 years until it is `>= 100`.
let shift_cycles = (year - 100).div_euclid(400);
year -= shift_cycles * 400;
}
let js_date = js_sys::Date::new_with_year_month_day_hr_min_sec(
year as u32,
local.month0() as i32,
local.day() as i32,
local.hour() as i32,
local.minute() as i32,
local.second() as i32,
// ignore milliseconds, our representation of leap seconds may be problematic
);
let offset = js_date.get_timezone_offset();
// We always get a result, even if this time does not exist or is ambiguous.
MappedLocalTime::Single(FixedOffset::west_opt((offset as i32) * 60).unwrap())
}
}
#[cfg(unix)]
mod tz_info;
/// The local timescale.
///
/// Using the [`TimeZone`](./trait.TimeZone.html) methods
/// on the Local struct is the preferred way to construct `DateTime<Local>`
/// instances.
///
/// # Example
///
/// ```
/// use chrono::{DateTime, Local, TimeZone};
///
/// let dt1: DateTime<Local> = Local::now();
/// let dt2: DateTime<Local> = Local.timestamp_opt(0, 0).unwrap();
/// assert!(dt1 >= dt2);
/// ```
#[derive(Copy, Clone, Debug)]
#[cfg_attr(
any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, Debug)))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))]
pub struct Local;
impl Local {
/// Returns a `Date` which corresponds to the current date.
#[deprecated(since = "0.4.23", note = "use `Local::now()` instead")]
#[allow(deprecated)]
#[must_use]
pub fn today() -> Date<Local> {
Local::now().date()
}
/// Returns a `DateTime<Local>` which corresponds to the current date, time and offset from
/// UTC.
///
/// See also the similar [`Utc::now()`] which returns `DateTime<Utc>`, i.e. without the local
/// offset.
///
/// # Example
///
/// ```
/// # #![allow(unused_variables)]
/// # use chrono::{DateTime, FixedOffset, Local};
/// // Current local time
/// let now = Local::now();
///
/// // Current local date
/// let today = now.date_naive();
///
/// // Current local time, converted to `DateTime<FixedOffset>`
/// let now_fixed_offset = Local::now().fixed_offset();
/// // or
/// let now_fixed_offset: DateTime<FixedOffset> = Local::now().into();
///
/// // Current time in some timezone (let's use +05:00)
/// // Note that it is usually more efficient to use `Utc::now` for this use case.
/// let offset = FixedOffset::east_opt(5 * 60 * 60).unwrap();
/// let now_with_offset = Local::now().with_timezone(&offset);
/// ```
pub fn now() -> DateTime<Local> {
Utc::now().with_timezone(&Local)
}
}
impl TimeZone for Local {
type Offset = FixedOffset;
fn from_offset(_offset: &FixedOffset) -> Local {
Local
}
#[allow(deprecated)]
fn offset_from_local_date(&self, local: &NaiveDate) -> MappedLocalTime<FixedOffset> {
// Get the offset at local midnight.
self.offset_from_local_datetime(&local.and_time(NaiveTime::MIN))
}
fn offset_from_local_datetime(&self, local: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
inner::offset_from_local_datetime(local)
}
#[allow(deprecated)]
fn offset_from_utc_date(&self, utc: &NaiveDate) -> FixedOffset {
// Get the offset at midnight.
self.offset_from_utc_datetime(&utc.and_time(NaiveTime::MIN))
}
fn offset_from_utc_datetime(&self, utc: &NaiveDateTime) -> FixedOffset {
inner::offset_from_utc_datetime(utc).unwrap()
}
}
#[cfg(windows)]
#[derive(Copy, Clone, Eq, PartialEq)]
struct Transition {
transition_utc: NaiveDateTime,
offset_before: FixedOffset,
offset_after: FixedOffset,
}
#[cfg(windows)]
impl Transition {
fn new(
transition_local: NaiveDateTime,
offset_before: FixedOffset,
offset_after: FixedOffset,
) -> Transition {
// It is no problem if the transition time in UTC falls a couple of hours inside the buffer
// space around the `NaiveDateTime` range (although it is very theoretical to have a
// transition at midnight around `NaiveDate::(MIN|MAX)`).
let transition_utc = transition_local.overflowing_sub_offset(offset_before);
Transition { transition_utc, offset_before, offset_after }
}
}
#[cfg(windows)]
impl PartialOrd for Transition {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.transition_utc.cmp(&other.transition_utc))
}
}
#[cfg(windows)]
impl Ord for Transition {
fn cmp(&self, other: &Self) -> Ordering {
self.transition_utc.cmp(&other.transition_utc)
}
}
// Calculate the time in UTC given a local time and transitions.
// `transitions` must be sorted.
#[cfg(windows)]
fn lookup_with_dst_transitions(
transitions: &[Transition],
dt: NaiveDateTime,
) -> MappedLocalTime<FixedOffset> {
for t in transitions.iter() {
// A transition can result in the wall clock time going forward (creating a gap) or going
// backward (creating a fold). We are interested in the earliest and latest wall time of the
// transition, as these are the times between which `dt` may not exist or is ambiguous.
//
// It is no problem if the transition time falls a couple of hours inside the buffer
// space around the `NaiveDateTime` range (although it is very theoretical to have a
// transition at midnight around `NaiveDate::(MIN|MAX)`).
let (offset_min, offset_max) =
match t.offset_after.local_minus_utc() > t.offset_before.local_minus_utc() {
true => (t.offset_before, t.offset_after),
false => (t.offset_after, t.offset_before),
};
let wall_earliest = t.transition_utc.overflowing_add_offset(offset_min);
let wall_latest = t.transition_utc.overflowing_add_offset(offset_max);
if dt < wall_earliest {
return MappedLocalTime::Single(t.offset_before);
} else if dt <= wall_latest {
return match t.offset_after.local_minus_utc().cmp(&t.offset_before.local_minus_utc()) {
Ordering::Equal => MappedLocalTime::Single(t.offset_before),
Ordering::Less => MappedLocalTime::Ambiguous(t.offset_before, t.offset_after),
Ordering::Greater => {
if dt == wall_earliest {
MappedLocalTime::Single(t.offset_before)
} else if dt == wall_latest {
MappedLocalTime::Single(t.offset_after)
} else {
MappedLocalTime::None
}
}
};
}
}
MappedLocalTime::Single(transitions.last().unwrap().offset_after)
}
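// Illustrative outcomes (sketch): with the transitions sorted as required, a wall
// time inside the interval skipped by a forward jump maps to
// `MappedLocalTime::None`, while a time inside the interval repeated by a backward
// jump maps to `MappedLocalTime::Ambiguous`; the tests below exercise each case,
// including the boundary instants.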
#[cfg(test)]
mod tests {
use super::Local;
use crate::offset::TimeZone;
#[cfg(windows)]
use crate::offset::local::{Transition, lookup_with_dst_transitions};
use crate::{Datelike, Days, Utc};
#[cfg(windows)]
use crate::{FixedOffset, MappedLocalTime, NaiveDate, NaiveDateTime};
#[test]
fn verify_correct_offsets() {
let now = Local::now();
let from_local = Local.from_local_datetime(&now.naive_local()).unwrap();
let from_utc = Local.from_utc_datetime(&now.naive_utc());
assert_eq!(now.offset().local_minus_utc(), from_local.offset().local_minus_utc());
assert_eq!(now.offset().local_minus_utc(), from_utc.offset().local_minus_utc());
assert_eq!(now, from_local);
assert_eq!(now, from_utc);
}
#[test]
fn verify_correct_offsets_distant_past() {
let distant_past = Local::now() - Days::new(365 * 500);
let from_local = Local.from_local_datetime(&distant_past.naive_local()).unwrap();
let from_utc = Local.from_utc_datetime(&distant_past.naive_utc());
assert_eq!(distant_past.offset().local_minus_utc(), from_local.offset().local_minus_utc());
assert_eq!(distant_past.offset().local_minus_utc(), from_utc.offset().local_minus_utc());
assert_eq!(distant_past, from_local);
assert_eq!(distant_past, from_utc);
}
#[test]
fn verify_correct_offsets_distant_future() {
let distant_future = Local::now() + Days::new(365 * 35000);
let from_local = Local.from_local_datetime(&distant_future.naive_local()).unwrap();
let from_utc = Local.from_utc_datetime(&distant_future.naive_utc());
assert_eq!(
distant_future.offset().local_minus_utc(),
from_local.offset().local_minus_utc()
);
assert_eq!(distant_future.offset().local_minus_utc(), from_utc.offset().local_minus_utc());
assert_eq!(distant_future, from_local);
assert_eq!(distant_future, from_utc);
}
#[test]
fn test_local_date_sanity_check() {
// issue #27
assert_eq!(Local.with_ymd_and_hms(2999, 12, 28, 0, 0, 0).unwrap().day(), 28);
}
#[test]
fn test_leap_second() {
// issue #123
let today = Utc::now().date_naive();
if let Some(dt) = today.and_hms_milli_opt(15, 2, 59, 1000) {
let timestr = dt.time().to_string();
// the OS API may or may not support the leap second,
// but there are only two sensible options.
assert!(
timestr == "15:02:60" || timestr == "15:03:00",
"unexpected timestr {:?}",
timestr
);
}
if let Some(dt) = today.and_hms_milli_opt(15, 2, 3, 1234) {
let timestr = dt.time().to_string();
assert!(
timestr == "15:02:03.234" || timestr == "15:02:04.234",
"unexpected timestr {:?}",
timestr
);
}
}
#[test]
#[cfg(windows)]
fn test_lookup_with_dst_transitions() {
let ymdhms = |y, m, d, h, n, s| {
NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap()
};
#[track_caller]
#[allow(clippy::too_many_arguments)]
fn compare_lookup(
transitions: &[Transition],
y: i32,
m: u32,
d: u32,
h: u32,
n: u32,
s: u32,
result: MappedLocalTime<FixedOffset>,
) {
let dt = NaiveDate::from_ymd_opt(y, m, d).unwrap().and_hms_opt(h, n, s).unwrap();
assert_eq!(lookup_with_dst_transitions(transitions, dt), result);
}
// dst transition before std transition
// dst offset > std offset
let std = FixedOffset::east_opt(3 * 60 * 60).unwrap();
let dst = FixedOffset::east_opt(4 * 60 * 60).unwrap();
let transitions = [
Transition::new(ymdhms(2023, 3, 26, 2, 0, 0), std, dst),
Transition::new(ymdhms(2023, 10, 29, 3, 0, 0), dst, std),
];
compare_lookup(&transitions, 2023, 3, 26, 1, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 3, 26, 2, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 3, 26, 2, 30, 0, MappedLocalTime::None);
compare_lookup(&transitions, 2023, 3, 26, 3, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 3, 26, 4, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 10, 29, 1, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 10, 29, 2, 0, 0, MappedLocalTime::Ambiguous(dst, std));
compare_lookup(&transitions, 2023, 10, 29, 2, 30, 0, MappedLocalTime::Ambiguous(dst, std));
compare_lookup(&transitions, 2023, 10, 29, 3, 0, 0, MappedLocalTime::Ambiguous(dst, std));
compare_lookup(&transitions, 2023, 10, 29, 4, 0, 0, MappedLocalTime::Single(std));
// std transition before dst transition
// dst offset > std offset
let std = FixedOffset::east_opt(-5 * 60 * 60).unwrap();
let dst = FixedOffset::east_opt(-4 * 60 * 60).unwrap();
let transitions = [
Transition::new(ymdhms(2023, 3, 24, 3, 0, 0), dst, std),
Transition::new(ymdhms(2023, 10, 27, 2, 0, 0), std, dst),
];
compare_lookup(&transitions, 2023, 3, 24, 1, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 3, 24, 2, 0, 0, MappedLocalTime::Ambiguous(dst, std));
compare_lookup(&transitions, 2023, 3, 24, 2, 30, 0, MappedLocalTime::Ambiguous(dst, std));
compare_lookup(&transitions, 2023, 3, 24, 3, 0, 0, MappedLocalTime::Ambiguous(dst, std));
compare_lookup(&transitions, 2023, 3, 24, 4, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 27, 1, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 27, 2, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 27, 2, 30, 0, MappedLocalTime::None);
compare_lookup(&transitions, 2023, 10, 27, 3, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 10, 27, 4, 0, 0, MappedLocalTime::Single(dst));
// dst transition before std transition
// dst offset < std offset
let std = FixedOffset::east_opt(3 * 60 * 60).unwrap();
let dst = FixedOffset::east_opt((2 * 60 + 30) * 60).unwrap();
let transitions = [
Transition::new(ymdhms(2023, 3, 26, 2, 30, 0), std, dst),
Transition::new(ymdhms(2023, 10, 29, 2, 0, 0), dst, std),
];
compare_lookup(&transitions, 2023, 3, 26, 1, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 3, 26, 2, 0, 0, MappedLocalTime::Ambiguous(std, dst));
compare_lookup(&transitions, 2023, 3, 26, 2, 15, 0, MappedLocalTime::Ambiguous(std, dst));
compare_lookup(&transitions, 2023, 3, 26, 2, 30, 0, MappedLocalTime::Ambiguous(std, dst));
compare_lookup(&transitions, 2023, 3, 26, 3, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 10, 29, 1, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 10, 29, 2, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 10, 29, 2, 15, 0, MappedLocalTime::None);
compare_lookup(&transitions, 2023, 10, 29, 2, 30, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 29, 3, 0, 0, MappedLocalTime::Single(std));
// std transition before dst transition
// dst offset < std offset
let std = FixedOffset::east_opt(-(4 * 60 + 30) * 60).unwrap();
let dst = FixedOffset::east_opt(-5 * 60 * 60).unwrap();
let transitions = [
Transition::new(ymdhms(2023, 3, 24, 2, 0, 0), dst, std),
Transition::new(ymdhms(2023, 10, 27, 2, 30, 0), std, dst),
];
compare_lookup(&transitions, 2023, 3, 24, 1, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 3, 24, 2, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 3, 24, 2, 15, 0, MappedLocalTime::None);
compare_lookup(&transitions, 2023, 3, 24, 2, 30, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 3, 24, 3, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 27, 1, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 27, 2, 0, 0, MappedLocalTime::Ambiguous(std, dst));
compare_lookup(&transitions, 2023, 10, 27, 2, 15, 0, MappedLocalTime::Ambiguous(std, dst));
compare_lookup(&transitions, 2023, 10, 27, 2, 30, 0, MappedLocalTime::Ambiguous(std, dst));
compare_lookup(&transitions, 2023, 10, 27, 3, 0, 0, MappedLocalTime::Single(dst));
// offset stays the same
let std = FixedOffset::east_opt(3 * 60 * 60).unwrap();
let transitions = [
Transition::new(ymdhms(2023, 3, 26, 2, 0, 0), std, std),
Transition::new(ymdhms(2023, 10, 29, 3, 0, 0), std, std),
];
compare_lookup(&transitions, 2023, 3, 26, 2, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 10, 29, 3, 0, 0, MappedLocalTime::Single(std));
// single transition
let std = FixedOffset::east_opt(3 * 60 * 60).unwrap();
let dst = FixedOffset::east_opt(4 * 60 * 60).unwrap();
let transitions = [Transition::new(ymdhms(2023, 3, 26, 2, 0, 0), std, dst)];
compare_lookup(&transitions, 2023, 3, 26, 1, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 3, 26, 2, 0, 0, MappedLocalTime::Single(std));
compare_lookup(&transitions, 2023, 3, 26, 2, 30, 0, MappedLocalTime::None);
compare_lookup(&transitions, 2023, 3, 26, 3, 0, 0, MappedLocalTime::Single(dst));
compare_lookup(&transitions, 2023, 3, 26, 4, 0, 0, MappedLocalTime::Single(dst));
}
#[test]
#[cfg(windows)]
fn test_lookup_with_dst_transitions_limits() {
// Transition beyond UTC year end doesn't panic in year of `NaiveDate::MAX`
let std = FixedOffset::east_opt(3 * 60 * 60).unwrap();
let dst = FixedOffset::east_opt(4 * 60 * 60).unwrap();
let transitions = [
Transition::new(NaiveDateTime::MAX.with_month(7).unwrap(), std, dst),
Transition::new(NaiveDateTime::MAX, dst, std),
];
assert_eq!(
lookup_with_dst_transitions(&transitions, NaiveDateTime::MAX.with_month(3).unwrap()),
MappedLocalTime::Single(std)
);
assert_eq!(
lookup_with_dst_transitions(&transitions, NaiveDateTime::MAX.with_month(8).unwrap()),
MappedLocalTime::Single(dst)
);
// Doesn't panic with `NaiveDateTime::MAX` as argument (which would be out of range when
// converted to UTC).
assert_eq!(
lookup_with_dst_transitions(&transitions, NaiveDateTime::MAX),
MappedLocalTime::Ambiguous(dst, std)
);
// Transition before UTC year end doesn't panic in year of `NaiveDate::MIN`
let std = FixedOffset::west_opt(3 * 60 * 60).unwrap();
let dst = FixedOffset::west_opt(4 * 60 * 60).unwrap();
let transitions = [
Transition::new(NaiveDateTime::MIN, std, dst),
Transition::new(NaiveDateTime::MIN.with_month(6).unwrap(), dst, std),
];
assert_eq!(
lookup_with_dst_transitions(&transitions, NaiveDateTime::MIN.with_month(3).unwrap()),
MappedLocalTime::Single(dst)
);
assert_eq!(
lookup_with_dst_transitions(&transitions, NaiveDateTime::MIN.with_month(8).unwrap()),
MappedLocalTime::Single(std)
);
// Doesn't panic with `NaiveDateTime::MIN` as argument (which would be out of range when
// converted to UTC).
assert_eq!(
lookup_with_dst_transitions(&transitions, NaiveDateTime::MIN),
MappedLocalTime::Ambiguous(std, dst)
);
}
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let local = Local;
// Local is a ZST and serializes to 0 bytes
let bytes = rkyv::to_bytes::<_, 0>(&local).unwrap();
assert_eq!(bytes.len(), 0);
// but is deserialized to an archived variant without a
// wrapping object
assert_eq!(rkyv::from_bytes::<Local>(&bytes).unwrap(), super::ArchivedLocal);
}
}

View File

@@ -0,0 +1,267 @@
//! Rust parser of ZoneInfoDb(`tzdata`) on Android and OpenHarmony
//!
//! Ported from: https://android.googlesource.com/platform/prebuilts/fullsdk/sources/+/refs/heads/androidx-appcompat-release/android-34/com/android/i18n/timezone/ZoneInfoDb.java
use std::{
ffi::CStr,
fmt::Debug,
fs::File,
io::{Error, ErrorKind, Read, Result, Seek, SeekFrom},
};
/// Get timezone data from the `tzdata` file of HarmonyOS NEXT.
#[cfg(target_env = "ohos")]
pub(crate) fn for_zone(tz_string: &str) -> Result<Option<Vec<u8>>> {
let mut file = File::open("/system/etc/zoneinfo/tzdata")?;
find_tz_data::<OHOS_ENTRY_LEN>(&mut file, tz_string.as_bytes())
}
/// Get timezone data from the `tzdata` file of Android.
#[cfg(target_os = "android")]
pub(crate) fn for_zone(tz_string: &str) -> Result<Option<Vec<u8>>> {
let mut file = open_android_tz_data_file()?;
find_tz_data::<ANDROID_ENTRY_LEN>(&mut file, tz_string.as_bytes())
}
/// Open the `tzdata` file of Android from the environment variables.
#[cfg(target_os = "android")]
fn open_android_tz_data_file() -> Result<File> {
for (env_var, path) in
[("ANDROID_DATA", "/misc/zoneinfo"), ("ANDROID_ROOT", "/usr/share/zoneinfo")]
{
if let Ok(env_value) = std::env::var(env_var) {
if let Ok(file) = File::open(format!("{}{}/tzdata", env_value, path)) {
return Ok(file);
}
}
}
Err(Error::from(ErrorKind::NotFound))
}
/// Get timezone data from the `tzdata` file reader
#[cfg(any(test, target_env = "ohos", target_os = "android"))]
fn find_tz_data<const ENTRY_LEN: usize>(
mut reader: impl Read + Seek,
tz_name: &[u8],
) -> Result<Option<Vec<u8>>> {
let header = TzDataHeader::new(&mut reader)?;
let index = TzDataIndexes::new::<ENTRY_LEN>(&mut reader, &header)?;
Ok(if let Some(entry) = index.find_timezone(tz_name) {
Some(index.find_tzdata(reader, &header, entry)?)
} else {
None
})
}
/// Header of the `tzdata` file.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TzDataHeader {
version: [u8; 5],
index_offset: u32,
data_offset: u32,
zonetab_offset: u32,
}
impl TzDataHeader {
/// Parse the header of the `tzdata` file.
fn new(mut data: impl Read) -> Result<Self> {
let version = {
let mut magic = [0; TZDATA_VERSION_LEN];
data.read_exact(&mut magic)?;
if !magic.starts_with(b"tzdata") || magic[TZDATA_VERSION_LEN - 1] != 0 {
return Err(Error::new(ErrorKind::Other, "invalid tzdata header magic"));
}
let mut version = [0; 5];
version.copy_from_slice(&magic[6..11]);
version
};
let mut offset = [0; 4];
data.read_exact(&mut offset)?;
let index_offset = u32::from_be_bytes(offset);
data.read_exact(&mut offset)?;
let data_offset = u32::from_be_bytes(offset);
data.read_exact(&mut offset)?;
let zonetab_offset = u32::from_be_bytes(offset);
Ok(Self { version, index_offset, data_offset, zonetab_offset })
}
}
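// Layout recap (sketch of what `new` reads above): a 12-byte version string such
// as "tzdata2024a\0", followed by three big-endian u32 values giving the index,
// data and zonetab offsets; the bytes between `index_offset` and `data_offset`
// hold the index entries walked by `TzDataIndexes` below.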
/// Indexes of the `tzdata` file.
struct TzDataIndexes {
indexes: Vec<TzDataIndex>,
}
impl TzDataIndexes {
/// Create a new `TzDataIndexes` from the `tzdata` file reader.
fn new<const ENTRY_LEN: usize>(mut reader: impl Read, header: &TzDataHeader) -> Result<Self> {
let mut buf = vec![0; header.data_offset.saturating_sub(header.index_offset) as usize];
reader.read_exact(&mut buf)?;
// replace chunks with array_chunks when it's stable
Ok(TzDataIndexes {
indexes: buf
.chunks(ENTRY_LEN)
.filter_map(|chunk| {
from_bytes_until_nul(&chunk[..TZ_NAME_LEN]).map(|name| {
let name = name.to_bytes().to_vec().into_boxed_slice();
let offset = u32::from_be_bytes(
chunk[TZ_NAME_LEN..TZ_NAME_LEN + 4].try_into().unwrap(),
);
let length = u32::from_be_bytes(
chunk[TZ_NAME_LEN + 4..TZ_NAME_LEN + 8].try_into().unwrap(),
);
TzDataIndex { name, offset, length }
})
})
.collect(),
})
}
/// Find a timezone by name.
fn find_timezone(&self, timezone: &[u8]) -> Option<&TzDataIndex> {
// timezones in tzdata are sorted by name.
self.indexes.binary_search_by_key(&timezone, |x| &x.name).map(|x| &self.indexes[x]).ok()
}
/// Retrieve a chunk of timezone data by the index.
fn find_tzdata(
&self,
mut reader: impl Read + Seek,
header: &TzDataHeader,
index: &TzDataIndex,
) -> Result<Vec<u8>> {
reader.seek(SeekFrom::Start(index.offset as u64 + header.data_offset as u64))?;
let mut buffer = vec![0; index.length as usize];
reader.read_exact(&mut buffer)?;
Ok(buffer)
}
}
/// Index entry of the `tzdata` file.
struct TzDataIndex {
name: Box<[u8]>,
offset: u32,
length: u32,
}
/// TODO: Change this to `CStr::from_bytes_until_nul` once the MSRV is bumped above 1.72.0
fn from_bytes_until_nul(bytes: &[u8]) -> Option<&CStr> {
let nul_pos = bytes.iter().position(|&b| b == 0)?;
// SAFETY:
// 1. nul_pos + 1 <= bytes.len()
// 2. We know there is a nul byte at nul_pos, so this slice (ending at the nul byte) is a well-formed C string.
Some(unsafe { CStr::from_bytes_with_nul_unchecked(&bytes[..=nul_pos]) })
}
/// Ohos tzdata index entry size: `name + offset + length`
#[cfg(any(test, target_env = "ohos"))]
const OHOS_ENTRY_LEN: usize = TZ_NAME_LEN + 2 * size_of::<u32>();
/// Android tzdata index entry size: `name + offset + length + raw_utc_offset(legacy)`:
/// [reference](https://android.googlesource.com/platform/prebuilts/fullsdk/sources/+/refs/heads/androidx-appcompat-release/android-34/com/android/i18n/timezone/ZoneInfoDb.java#271)
#[cfg(any(test, target_os = "android"))]
const ANDROID_ENTRY_LEN: usize = TZ_NAME_LEN + 3 * size_of::<u32>();
/// The database reserves 40 bytes for each id.
const TZ_NAME_LEN: usize = 40;
/// Size of the version string in the header of `tzdata` file.
/// e.g. `tzdata2024b\0`
const TZDATA_VERSION_LEN: usize = 12;
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_ohos_tzdata_header_and_index() {
let file = File::open("./tests/ohos/tzdata").unwrap();
let header = TzDataHeader::new(&file).unwrap();
assert_eq!(header.version, *b"2024a");
assert_eq!(header.index_offset, 24);
assert_eq!(header.data_offset, 21240);
assert_eq!(header.zonetab_offset, 272428);
let iter = TzDataIndexes::new::<OHOS_ENTRY_LEN>(&file, &header).unwrap();
assert_eq!(iter.indexes.len(), 442);
assert!(iter.find_timezone(b"Asia/Shanghai").is_some());
assert!(iter.find_timezone(b"Pacific/Noumea").is_some());
}
#[test]
fn test_ohos_tzdata_loading() {
let file = File::open("./tests/ohos/tzdata").unwrap();
let header = TzDataHeader::new(&file).unwrap();
let iter = TzDataIndexes::new::<OHOS_ENTRY_LEN>(&file, &header).unwrap();
let timezone = iter.find_timezone(b"Asia/Shanghai").unwrap();
let tzdata = iter.find_tzdata(&file, &header, timezone).unwrap();
assert_eq!(tzdata.len(), 393);
}
#[test]
fn test_invalid_tzdata_header() {
TzDataHeader::new(&b"tzdaaa2024aaaaaaaaaaaaaaa\0"[..]).unwrap_err();
}
#[test]
fn test_android_tzdata_header_and_index() {
let file = File::open("./tests/android/tzdata").unwrap();
let header = TzDataHeader::new(&file).unwrap();
assert_eq!(header.version, *b"2021a");
assert_eq!(header.index_offset, 24);
assert_eq!(header.data_offset, 30860);
assert_eq!(header.zonetab_offset, 491837);
let iter = TzDataIndexes::new::<ANDROID_ENTRY_LEN>(&file, &header).unwrap();
assert_eq!(iter.indexes.len(), 593);
assert!(iter.find_timezone(b"Asia/Shanghai").is_some());
assert!(iter.find_timezone(b"Pacific/Noumea").is_some());
}
#[test]
fn test_android_tzdata_loading() {
let file = File::open("./tests/android/tzdata").unwrap();
let header = TzDataHeader::new(&file).unwrap();
let iter = TzDataIndexes::new::<ANDROID_ENTRY_LEN>(&file, &header).unwrap();
let timezone = iter.find_timezone(b"Asia/Shanghai").unwrap();
let tzdata = iter.find_tzdata(&file, &header, timezone).unwrap();
assert_eq!(tzdata.len(), 573);
}
#[test]
fn test_ohos_tzdata_find() {
let file = File::open("./tests/ohos/tzdata").unwrap();
let tzdata = find_tz_data::<OHOS_ENTRY_LEN>(file, b"Asia/Shanghai").unwrap().unwrap();
assert_eq!(tzdata.len(), 393);
}
#[test]
fn test_ohos_tzdata_find_missing() {
let file = File::open("./tests/ohos/tzdata").unwrap();
assert!(find_tz_data::<OHOS_ENTRY_LEN>(file, b"Asia/Sjasdfai").unwrap().is_none());
}
#[test]
fn test_android_tzdata_find() {
let file = File::open("./tests/android/tzdata").unwrap();
let tzdata = find_tz_data::<ANDROID_ENTRY_LEN>(file, b"Asia/Shanghai").unwrap().unwrap();
assert_eq!(tzdata.len(), 573);
}
#[test]
fn test_android_tzdata_find_missing() {
let file = File::open("./tests/android/tzdata").unwrap();
assert!(find_tz_data::<ANDROID_ENTRY_LEN>(file, b"Asia/S000000i").unwrap().is_none());
}
#[cfg(target_env = "ohos")]
#[test]
fn test_ohos_machine_tz_data_loading() {
let tzdata = for_zone(b"Asia/Shanghai").unwrap().unwrap();
assert!(!tzdata.is_empty());
}
#[cfg(target_os = "android")]
#[test]
fn test_android_machine_tz_data_loading() {
let tzdata = for_zone(b"Asia/Shanghai").unwrap().unwrap();
assert!(!tzdata.is_empty());
}
}

View File

@@ -0,0 +1,116 @@
#![deny(missing_docs)]
#![allow(dead_code)]
#![warn(unreachable_pub)]
use std::num::ParseIntError;
use std::str::Utf8Error;
use std::time::SystemTimeError;
use std::{error, fmt, io};
mod timezone;
pub(crate) use timezone::TimeZone;
mod parser;
mod rule;
/// Unified error type for everything in the crate
#[derive(Debug)]
pub(crate) enum Error {
/// Date time error
DateTime(&'static str),
/// Local time type search error
FindLocalTimeType(&'static str),
/// Local time type error
LocalTimeType(&'static str),
/// Invalid slice for integer conversion
InvalidSlice(&'static str),
/// Invalid Tzif file
InvalidTzFile(&'static str),
/// Invalid TZ string
InvalidTzString(&'static str),
/// I/O error
Io(io::Error),
/// Out of range error
OutOfRange(&'static str),
/// Integer parsing error
ParseInt(ParseIntError),
/// Date time projection error
ProjectDateTime(&'static str),
/// System time error
SystemTime(SystemTimeError),
/// Time zone error
TimeZone(&'static str),
/// Transition rule error
TransitionRule(&'static str),
/// Unsupported Tzif file
UnsupportedTzFile(&'static str),
/// Unsupported TZ string
UnsupportedTzString(&'static str),
/// UTF-8 error
Utf8(Utf8Error),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use Error::*;
match self {
DateTime(error) => write!(f, "invalid date time: {error}"),
FindLocalTimeType(error) => error.fmt(f),
LocalTimeType(error) => write!(f, "invalid local time type: {error}"),
InvalidSlice(error) => error.fmt(f),
InvalidTzString(error) => write!(f, "invalid TZ string: {error}"),
InvalidTzFile(error) => error.fmt(f),
Io(error) => error.fmt(f),
OutOfRange(error) => error.fmt(f),
ParseInt(error) => error.fmt(f),
ProjectDateTime(error) => error.fmt(f),
SystemTime(error) => error.fmt(f),
TransitionRule(error) => write!(f, "invalid transition rule: {error}"),
TimeZone(error) => write!(f, "invalid time zone: {error}"),
UnsupportedTzFile(error) => error.fmt(f),
UnsupportedTzString(error) => write!(f, "unsupported TZ string: {error}"),
Utf8(error) => error.fmt(f),
}
}
}
impl error::Error for Error {}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error::Io(error)
}
}
impl From<ParseIntError> for Error {
fn from(error: ParseIntError) -> Self {
Error::ParseInt(error)
}
}
impl From<SystemTimeError> for Error {
fn from(error: SystemTimeError) -> Self {
Error::SystemTime(error)
}
}
impl From<Utf8Error> for Error {
fn from(error: Utf8Error) -> Self {
Error::Utf8(error)
}
}
/// Number of hours in one day
const HOURS_PER_DAY: i64 = 24;
/// Number of seconds in one hour
const SECONDS_PER_HOUR: i64 = 3600;
/// Number of seconds in one day
const SECONDS_PER_DAY: i64 = SECONDS_PER_HOUR * HOURS_PER_DAY;
/// Number of days in one week
const DAYS_PER_WEEK: i64 = 7;
/// Month days in a normal year
const DAY_IN_MONTHS_NORMAL_YEAR: [i64; 12] = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
/// Cumulated month days in a normal year
const CUMUL_DAY_IN_MONTHS_NORMAL_YEAR: [i64; 12] =
[0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334];

View File

@@ -0,0 +1,348 @@
use std::io::{self, ErrorKind};
use std::iter;
use std::num::ParseIntError;
use std::str::{self, FromStr};
use super::Error;
use super::rule::TransitionRule;
use super::timezone::{LeapSecond, LocalTimeType, TimeZone, Transition};
pub(super) fn parse(bytes: &[u8]) -> Result<TimeZone, Error> {
let mut cursor = Cursor::new(bytes);
let state = State::new(&mut cursor, true)?;
let (state, footer) = match state.header.version {
Version::V1 => match cursor.is_empty() {
true => (state, None),
false => {
return Err(Error::InvalidTzFile("remaining data after end of TZif v1 data block"));
}
},
Version::V2 | Version::V3 => {
let state = State::new(&mut cursor, false)?;
(state, Some(cursor.remaining()))
}
};
let mut transitions = Vec::with_capacity(state.header.transition_count);
for (arr_time, &local_time_type_index) in
state.transition_times.chunks_exact(state.time_size).zip(state.transition_types)
{
let unix_leap_time =
state.parse_time(&arr_time[0..state.time_size], state.header.version)?;
let local_time_type_index = local_time_type_index as usize;
transitions.push(Transition::new(unix_leap_time, local_time_type_index));
}
let mut local_time_types = Vec::with_capacity(state.header.type_count);
for arr in state.local_time_types.chunks_exact(6) {
let ut_offset = read_be_i32(&arr[..4])?;
let is_dst = match arr[4] {
0 => false,
1 => true,
_ => return Err(Error::InvalidTzFile("invalid DST indicator")),
};
let char_index = arr[5] as usize;
if char_index >= state.header.char_count {
return Err(Error::InvalidTzFile("invalid time zone name char index"));
}
let position = match state.names[char_index..].iter().position(|&c| c == b'\0') {
Some(position) => position,
None => return Err(Error::InvalidTzFile("invalid time zone name char index")),
};
let name = &state.names[char_index..char_index + position];
let name = if !name.is_empty() { Some(name) } else { None };
local_time_types.push(LocalTimeType::new(ut_offset, is_dst, name)?);
}
let mut leap_seconds = Vec::with_capacity(state.header.leap_count);
for arr in state.leap_seconds.chunks_exact(state.time_size + 4) {
let unix_leap_time = state.parse_time(&arr[0..state.time_size], state.header.version)?;
let correction = read_be_i32(&arr[state.time_size..state.time_size + 4])?;
leap_seconds.push(LeapSecond::new(unix_leap_time, correction));
}
let std_walls_iter = state.std_walls.iter().copied().chain(iter::repeat(0));
let ut_locals_iter = state.ut_locals.iter().copied().chain(iter::repeat(0));
if std_walls_iter.zip(ut_locals_iter).take(state.header.type_count).any(|pair| pair == (0, 1)) {
return Err(Error::InvalidTzFile(
"invalid couple of standard/wall and UT/local indicators",
));
}
let extra_rule = match footer {
Some(footer) => {
let footer = str::from_utf8(footer)?;
if !(footer.starts_with('\n') && footer.ends_with('\n')) {
return Err(Error::InvalidTzFile("invalid footer"));
}
let tz_string = footer.trim_matches(|c: char| c.is_ascii_whitespace());
if tz_string.starts_with(':') || tz_string.contains('\0') {
return Err(Error::InvalidTzFile("invalid footer"));
}
match tz_string.is_empty() {
true => None,
false => Some(TransitionRule::from_tz_string(
tz_string.as_bytes(),
state.header.version == Version::V3,
)?),
}
}
None => None,
};
TimeZone::new(transitions, local_time_types, leap_seconds, extra_rule)
}
/// TZif data blocks
struct State<'a> {
header: Header,
/// Time size in bytes
time_size: usize,
/// Transition times data block
transition_times: &'a [u8],
/// Transition types data block
transition_types: &'a [u8],
/// Local time types data block
local_time_types: &'a [u8],
/// Time zone names data block
names: &'a [u8],
/// Leap seconds data block
leap_seconds: &'a [u8],
/// UT/local indicators data block
std_walls: &'a [u8],
/// Standard/wall indicators data block
ut_locals: &'a [u8],
}
impl<'a> State<'a> {
/// Read TZif data blocks
fn new(cursor: &mut Cursor<'a>, first: bool) -> Result<Self, Error> {
let header = Header::new(cursor)?;
let time_size = match first {
true => 4, // We always parse V1 first
false => 8,
};
Ok(Self {
time_size,
transition_times: cursor.read_exact(header.transition_count * time_size)?,
transition_types: cursor.read_exact(header.transition_count)?,
local_time_types: cursor.read_exact(header.type_count * 6)?,
names: cursor.read_exact(header.char_count)?,
leap_seconds: cursor.read_exact(header.leap_count * (time_size + 4))?,
std_walls: cursor.read_exact(header.std_wall_count)?,
ut_locals: cursor.read_exact(header.ut_local_count)?,
header,
})
}
/// Parse time values
fn parse_time(&self, arr: &[u8], version: Version) -> Result<i64, Error> {
match version {
Version::V1 => Ok(read_be_i32(&arr[..4])?.into()),
Version::V2 | Version::V3 => read_be_i64(arr),
}
}
}
/// TZif header
#[derive(Debug)]
struct Header {
/// TZif version
version: Version,
/// Number of UT/local indicators
ut_local_count: usize,
/// Number of standard/wall indicators
std_wall_count: usize,
/// Number of leap-second records
leap_count: usize,
/// Number of transition times
transition_count: usize,
/// Number of local time type records
type_count: usize,
/// Number of time zone names bytes
char_count: usize,
}
impl Header {
fn new(cursor: &mut Cursor) -> Result<Self, Error> {
let magic = cursor.read_exact(4)?;
if magic != *b"TZif" {
return Err(Error::InvalidTzFile("invalid magic number"));
}
let version = match cursor.read_exact(1)? {
[0x00] => Version::V1,
[0x32] => Version::V2,
[0x33] => Version::V3,
_ => return Err(Error::UnsupportedTzFile("unsupported TZif version")),
};
cursor.read_exact(15)?;
let ut_local_count = cursor.read_be_u32()?;
let std_wall_count = cursor.read_be_u32()?;
let leap_count = cursor.read_be_u32()?;
let transition_count = cursor.read_be_u32()?;
let type_count = cursor.read_be_u32()?;
let char_count = cursor.read_be_u32()?;
if !(type_count != 0
&& char_count != 0
&& (ut_local_count == 0 || ut_local_count == type_count)
&& (std_wall_count == 0 || std_wall_count == type_count))
{
return Err(Error::InvalidTzFile("invalid header"));
}
Ok(Self {
version,
ut_local_count: ut_local_count as usize,
std_wall_count: std_wall_count as usize,
leap_count: leap_count as usize,
transition_count: transition_count as usize,
type_count: type_count as usize,
char_count: char_count as usize,
})
}
}
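// Layout recap (sketch of what `new` reads above): a TZif header is 44 bytes --
// the 4-byte magic "TZif", a 1-byte version, 15 reserved bytes, then six
// big-endian u32 counts in the order read above (UT/local, standard/wall, leap,
// transition, type and name-byte counts).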
/// A `Cursor` contains a slice of a buffer and a read count.
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct Cursor<'a> {
/// Slice representing the remaining data to be read
remaining: &'a [u8],
/// Number of already read bytes
read_count: usize,
}
impl<'a> Cursor<'a> {
/// Construct a new `Cursor` from remaining data
pub(crate) const fn new(remaining: &'a [u8]) -> Self {
Self { remaining, read_count: 0 }
}
pub(crate) fn peek(&self) -> Option<&u8> {
self.remaining().first()
}
/// Returns remaining data
pub(crate) const fn remaining(&self) -> &'a [u8] {
self.remaining
}
/// Returns `true` if data is remaining
pub(crate) const fn is_empty(&self) -> bool {
self.remaining.is_empty()
}
pub(crate) fn read_be_u32(&mut self) -> Result<u32, Error> {
let mut buf = [0; 4];
buf.copy_from_slice(self.read_exact(4)?);
Ok(u32::from_be_bytes(buf))
}
#[cfg(target_env = "ohos")]
pub(crate) fn seek_after(&mut self, offset: usize) -> Result<usize, io::Error> {
if offset < self.read_count {
return Err(io::Error::from(ErrorKind::UnexpectedEof));
}
match self.remaining.get((offset - self.read_count)..) {
Some(remaining) => {
self.remaining = remaining;
self.read_count = offset;
Ok(offset)
}
_ => Err(io::Error::from(ErrorKind::UnexpectedEof)),
}
}
/// Read exactly `count` bytes, reducing remaining data and incrementing read count
pub(crate) fn read_exact(&mut self, count: usize) -> Result<&'a [u8], io::Error> {
match (self.remaining.get(..count), self.remaining.get(count..)) {
(Some(result), Some(remaining)) => {
self.remaining = remaining;
self.read_count += count;
Ok(result)
}
_ => Err(io::Error::from(ErrorKind::UnexpectedEof)),
}
}
/// Read bytes and compare them to the provided tag
pub(crate) fn read_tag(&mut self, tag: &[u8]) -> Result<(), io::Error> {
if self.read_exact(tag.len())? == tag {
Ok(())
} else {
Err(io::Error::from(ErrorKind::InvalidData))
}
}
/// Read bytes if the remaining data is prefixed by the provided tag
pub(crate) fn read_optional_tag(&mut self, tag: &[u8]) -> Result<bool, io::Error> {
if self.remaining.starts_with(tag) {
self.read_exact(tag.len())?;
Ok(true)
} else {
Ok(false)
}
}
/// Read bytes as long as the provided predicate is true
pub(crate) fn read_while<F: Fn(&u8) -> bool>(&mut self, f: F) -> Result<&'a [u8], io::Error> {
match self.remaining.iter().position(|x| !f(x)) {
None => self.read_exact(self.remaining.len()),
Some(position) => self.read_exact(position),
}
}
/// Parse an integer out of the ASCII digits
pub(crate) fn read_int<T: FromStr<Err = ParseIntError>>(&mut self) -> Result<T, Error> {
let bytes = self.read_while(u8::is_ascii_digit)?;
Ok(str::from_utf8(bytes)?.parse()?)
}
/// Read bytes until the provided predicate is true
pub(crate) fn read_until<F: Fn(&u8) -> bool>(&mut self, f: F) -> Result<&'a [u8], io::Error> {
match self.remaining.iter().position(f) {
None => self.read_exact(self.remaining.len()),
Some(position) => self.read_exact(position),
}
}
}
pub(crate) fn read_be_i32(bytes: &[u8]) -> Result<i32, Error> {
if bytes.len() != 4 {
return Err(Error::InvalidSlice("too short for i32"));
}
let mut buf = [0; 4];
buf.copy_from_slice(bytes);
Ok(i32::from_be_bytes(buf))
}
pub(crate) fn read_be_i64(bytes: &[u8]) -> Result<i64, Error> {
if bytes.len() != 8 {
return Err(Error::InvalidSlice("too short for i64"));
}
let mut buf = [0; 8];
buf.copy_from_slice(bytes);
Ok(i64::from_be_bytes(buf))
}
/// TZif version
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum Version {
/// Version 1
V1,
/// Version 2
V2,
/// Version 3
V3,
}
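// Usage sketch (illustrative; the path is only an example): `TimeZone::from_tz_data`
// in timezone.rs feeds the raw bytes of a TZif file into `parse`:
//   let tz = parse(&std::fs::read("/usr/share/zoneinfo/UTC")?)?;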

File diff suppressed because it is too large

View File

@@ -0,0 +1,948 @@
//! Types related to a time zone.
use std::fs::{self, File};
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use std::{cmp::Ordering, fmt, str};
use super::rule::{AlternateTime, TransitionRule};
use super::{DAYS_PER_WEEK, Error, SECONDS_PER_DAY, parser};
use crate::NaiveDateTime;
/// Time zone
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct TimeZone {
/// List of transitions
transitions: Vec<Transition>,
/// List of local time types (cannot be empty)
local_time_types: Vec<LocalTimeType>,
/// List of leap seconds
leap_seconds: Vec<LeapSecond>,
/// Extra transition rule applicable after the last transition
extra_rule: Option<TransitionRule>,
}
impl TimeZone {
/// Returns local time zone.
///
/// This method is not supported on non-UNIX platforms, and returns the UTC time zone instead.
pub(crate) fn local(env_tz: Option<&str>) -> Result<Self, Error> {
match env_tz {
Some(tz) => Self::from_posix_tz(tz),
None => Self::from_posix_tz("localtime"),
}
}
/// Construct a time zone from a POSIX TZ string, as described in [the POSIX documentation of the `TZ` environment variable](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html).
fn from_posix_tz(tz_string: &str) -> Result<Self, Error> {
// It is commonly agreed (but not standard) that setting an empty `TZ=` uses UTC.
if tz_string.is_empty() {
return Ok(Self::utc());
}
if tz_string == "localtime" {
return Self::from_tz_data(&fs::read("/etc/localtime")?);
}
// attributes are not allowed on if blocks in Rust 1.38
#[cfg(any(target_os = "android", target_env = "ohos"))]
{
if let Ok(Some(bytes)) = crate::offset::local::tz_data::for_zone(tz_string) {
return Self::from_tz_data(&bytes);
}
}
let mut chars = tz_string.chars();
if chars.next() == Some(':') {
return Self::from_file(&mut find_tz_file(chars.as_str())?);
}
if let Ok(mut file) = find_tz_file(tz_string) {
return Self::from_file(&mut file);
}
// TZ string extensions are not allowed
let tz_string = tz_string.trim_matches(|c: char| c.is_ascii_whitespace());
let rule = TransitionRule::from_tz_string(tz_string.as_bytes(), false)?;
Self::new(
vec![],
match rule {
TransitionRule::Fixed(local_time_type) => vec![local_time_type],
TransitionRule::Alternate(AlternateTime { std, dst, .. }) => vec![std, dst],
},
vec![],
Some(rule),
)
}
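// Illustrative inputs for the branches above (sketch; example values only):
//   ""                            -> the UTC time zone
//   "localtime"                   -> parsed from /etc/localtime
//   ":Asia/Shanghai"              -> looked up as a TZif file under the zoneinfo directories
//   "CET-1CEST,M3.5.0,M10.5.0/3"  -> handed to `TransitionRule::from_tz_string`
//                                    (after a TZif lookup for that name fails)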
/// Construct a time zone
pub(super) fn new(
transitions: Vec<Transition>,
local_time_types: Vec<LocalTimeType>,
leap_seconds: Vec<LeapSecond>,
extra_rule: Option<TransitionRule>,
) -> Result<Self, Error> {
let new = Self { transitions, local_time_types, leap_seconds, extra_rule };
new.as_ref().validate()?;
Ok(new)
}
/// Construct a time zone from the contents of a time zone file
fn from_file(file: &mut File) -> Result<Self, Error> {
let mut bytes = Vec::new();
file.read_to_end(&mut bytes)?;
Self::from_tz_data(&bytes)
}
/// Construct a time zone from the contents of a time zone file
///
/// Parse TZif data as described in [RFC 8536](https://datatracker.ietf.org/doc/html/rfc8536).
pub(crate) fn from_tz_data(bytes: &[u8]) -> Result<Self, Error> {
parser::parse(bytes)
}
/// Construct a time zone with the specified UTC offset in seconds
fn fixed(ut_offset: i32) -> Result<Self, Error> {
Ok(Self {
transitions: Vec::new(),
local_time_types: vec![LocalTimeType::with_offset(ut_offset)?],
leap_seconds: Vec::new(),
extra_rule: None,
})
}
/// Construct the time zone associated to UTC
pub(crate) fn utc() -> Self {
Self {
transitions: Vec::new(),
local_time_types: vec![LocalTimeType::UTC],
leap_seconds: Vec::new(),
extra_rule: None,
}
}
/// Find the local time type associated to the time zone at the specified Unix time in seconds
pub(crate) fn find_local_time_type(&self, unix_time: i64) -> Result<&LocalTimeType, Error> {
self.as_ref().find_local_time_type(unix_time)
}
pub(crate) fn find_local_time_type_from_local(
&self,
local_time: NaiveDateTime,
) -> Result<crate::MappedLocalTime<LocalTimeType>, Error> {
self.as_ref().find_local_time_type_from_local(local_time)
}
/// Returns a reference to the time zone
fn as_ref(&'_ self) -> TimeZoneRef<'_> {
TimeZoneRef {
transitions: &self.transitions,
local_time_types: &self.local_time_types,
leap_seconds: &self.leap_seconds,
extra_rule: &self.extra_rule,
}
}
}
/// Reference to a time zone
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) struct TimeZoneRef<'a> {
/// List of transitions
transitions: &'a [Transition],
/// List of local time types (cannot be empty)
local_time_types: &'a [LocalTimeType],
/// List of leap seconds
leap_seconds: &'a [LeapSecond],
/// Extra transition rule applicable after the last transition
extra_rule: &'a Option<TransitionRule>,
}
impl<'a> TimeZoneRef<'a> {
/// Find the local time type associated to the time zone at the specified Unix time in seconds
pub(crate) fn find_local_time_type(&self, unix_time: i64) -> Result<&'a LocalTimeType, Error> {
let extra_rule = match self.transitions.last() {
None => match self.extra_rule {
Some(extra_rule) => extra_rule,
None => return Ok(&self.local_time_types[0]),
},
Some(last_transition) => {
let unix_leap_time = match self.unix_time_to_unix_leap_time(unix_time) {
Ok(unix_leap_time) => unix_leap_time,
Err(Error::OutOfRange(error)) => return Err(Error::FindLocalTimeType(error)),
Err(err) => return Err(err),
};
if unix_leap_time >= last_transition.unix_leap_time {
match self.extra_rule {
Some(extra_rule) => extra_rule,
None => {
// RFC 8536 3.2:
// "Local time for timestamps on or after the last transition is
// specified by the TZ string in the footer (Section 3.3) if present
// and nonempty; otherwise, it is unspecified."
//
// Older versions of macOS (1.12 and before?) have TZif file with a
// missing TZ string, and use the offset given by the last transition.
return Ok(
&self.local_time_types[last_transition.local_time_type_index]
);
}
}
} else {
let index = match self
.transitions
.binary_search_by_key(&unix_leap_time, Transition::unix_leap_time)
{
Ok(x) => x + 1,
Err(x) => x,
};
let local_time_type_index = if index > 0 {
self.transitions[index - 1].local_time_type_index
} else {
0
};
return Ok(&self.local_time_types[local_time_type_index]);
}
}
};
match extra_rule.find_local_time_type(unix_time) {
Ok(local_time_type) => Ok(local_time_type),
Err(Error::OutOfRange(error)) => Err(Error::FindLocalTimeType(error)),
err => err,
}
}
pub(crate) fn find_local_time_type_from_local(
&self,
local_time: NaiveDateTime,
) -> Result<crate::MappedLocalTime<LocalTimeType>, Error> {
// TODO: this is wrong, as we would need a `local_time_to_local_leap_time`?
// but ... does the local time even include leap seconds ??
// let unix_leap_time = match self.unix_time_to_unix_leap_time(local_time) {
// Ok(unix_leap_time) => unix_leap_time,
// Err(Error::OutOfRange(error)) => return Err(Error::FindLocalTimeType(error)),
// Err(err) => return Err(err),
// };
let local_leap_time = local_time.and_utc().timestamp();
// if we have at least one transition,
// we must check _all_ of them, in case of any Overlapping (MappedLocalTime::Ambiguous) or Skipping (MappedLocalTime::None) transitions
let offset_after_last = if !self.transitions.is_empty() {
let mut prev = self.local_time_types[0];
for transition in self.transitions {
let after_ltt = self.local_time_types[transition.local_time_type_index];
// the end and start here refer to where the time starts prior to the transition
// and where it ends up after, not the temporal relationship.
let transition_end = transition.unix_leap_time + i64::from(after_ltt.ut_offset);
let transition_start = transition.unix_leap_time + i64::from(prev.ut_offset);
match transition_start.cmp(&transition_end) {
Ordering::Greater => {
// backwards transition, eg from DST to regular
// this means a given local time could have one of two possible offsets
if local_leap_time < transition_end {
return Ok(crate::MappedLocalTime::Single(prev));
} else if local_leap_time >= transition_end
&& local_leap_time <= transition_start
{
if prev.ut_offset < after_ltt.ut_offset {
return Ok(crate::MappedLocalTime::Ambiguous(prev, after_ltt));
} else {
return Ok(crate::MappedLocalTime::Ambiguous(after_ltt, prev));
}
}
}
Ordering::Equal => {
// should this ever happen? presumably we have to handle it anyway.
if local_leap_time < transition_start {
return Ok(crate::MappedLocalTime::Single(prev));
} else if local_leap_time == transition_end {
if prev.ut_offset < after_ltt.ut_offset {
return Ok(crate::MappedLocalTime::Ambiguous(prev, after_ltt));
} else {
return Ok(crate::MappedLocalTime::Ambiguous(after_ltt, prev));
}
}
}
Ordering::Less => {
// forwards transition, eg from regular to DST
// this means that times that are skipped are invalid local times
if local_leap_time <= transition_start {
return Ok(crate::MappedLocalTime::Single(prev));
} else if local_leap_time < transition_end {
return Ok(crate::MappedLocalTime::None);
} else if local_leap_time == transition_end {
return Ok(crate::MappedLocalTime::Single(after_ltt));
}
}
}
// try the next transition, we are fully after this one
prev = after_ltt;
}
prev
} else {
self.local_time_types[0]
};
if let Some(extra_rule) = self.extra_rule {
match extra_rule.find_local_time_type_from_local(local_time) {
Ok(local_time_type) => Ok(local_time_type),
Err(Error::OutOfRange(error)) => Err(Error::FindLocalTimeType(error)),
err => err,
}
} else {
Ok(crate::MappedLocalTime::Single(offset_after_last))
}
}
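// Illustrative outcomes (sketch): local times skipped by a forward transition
// yield `MappedLocalTime::None`, local times repeated by a backward transition
// yield `MappedLocalTime::Ambiguous` with both candidate offsets, and times not
// affected by any listed transition fall through to the extra rule (if present)
// or to the offset in effect after the last transition.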
/// Check time zone inputs
fn validate(&self) -> Result<(), Error> {
// Check local time types
let local_time_types_size = self.local_time_types.len();
if local_time_types_size == 0 {
return Err(Error::TimeZone("list of local time types must not be empty"));
}
// Check transitions
let mut i_transition = 0;
while i_transition < self.transitions.len() {
if self.transitions[i_transition].local_time_type_index >= local_time_types_size {
return Err(Error::TimeZone("invalid local time type index"));
}
if i_transition + 1 < self.transitions.len()
&& self.transitions[i_transition].unix_leap_time
>= self.transitions[i_transition + 1].unix_leap_time
{
return Err(Error::TimeZone("invalid transition"));
}
i_transition += 1;
}
// Check leap seconds
if !(self.leap_seconds.is_empty()
|| self.leap_seconds[0].unix_leap_time >= 0
&& self.leap_seconds[0].correction.saturating_abs() == 1)
{
return Err(Error::TimeZone("invalid leap second"));
}
let min_interval = SECONDS_PER_28_DAYS - 1;
let mut i_leap_second = 0;
while i_leap_second < self.leap_seconds.len() {
if i_leap_second + 1 < self.leap_seconds.len() {
let x0 = &self.leap_seconds[i_leap_second];
let x1 = &self.leap_seconds[i_leap_second + 1];
let diff_unix_leap_time = x1.unix_leap_time.saturating_sub(x0.unix_leap_time);
let abs_diff_correction =
x1.correction.saturating_sub(x0.correction).saturating_abs();
if !(diff_unix_leap_time >= min_interval && abs_diff_correction == 1) {
return Err(Error::TimeZone("invalid leap second"));
}
}
i_leap_second += 1;
}
// Check extra rule
let (extra_rule, last_transition) = match (&self.extra_rule, self.transitions.last()) {
(Some(rule), Some(trans)) => (rule, trans),
_ => return Ok(()),
};
let last_local_time_type = &self.local_time_types[last_transition.local_time_type_index];
let unix_time = match self.unix_leap_time_to_unix_time(last_transition.unix_leap_time) {
Ok(unix_time) => unix_time,
Err(Error::OutOfRange(error)) => return Err(Error::TimeZone(error)),
Err(err) => return Err(err),
};
let rule_local_time_type = match extra_rule.find_local_time_type(unix_time) {
Ok(rule_local_time_type) => rule_local_time_type,
Err(Error::OutOfRange(error)) => return Err(Error::TimeZone(error)),
Err(err) => return Err(err),
};
let check = last_local_time_type.ut_offset == rule_local_time_type.ut_offset
&& last_local_time_type.is_dst == rule_local_time_type.is_dst
&& match (&last_local_time_type.name, &rule_local_time_type.name) {
(Some(x), Some(y)) => x.equal(y),
(None, None) => true,
_ => false,
};
if !check {
return Err(Error::TimeZone(
"extra transition rule is inconsistent with the last transition",
));
}
Ok(())
}
/// Convert Unix time to Unix leap time, from the list of leap seconds in a time zone
const fn unix_time_to_unix_leap_time(&self, unix_time: i64) -> Result<i64, Error> {
let mut unix_leap_time = unix_time;
let mut i = 0;
while i < self.leap_seconds.len() {
let leap_second = &self.leap_seconds[i];
if unix_leap_time < leap_second.unix_leap_time {
break;
}
unix_leap_time = match unix_time.checked_add(leap_second.correction as i64) {
Some(unix_leap_time) => unix_leap_time,
None => return Err(Error::OutOfRange("out of range operation")),
};
i += 1;
}
Ok(unix_leap_time)
}
/// Convert Unix leap time to Unix time, from the list of leap seconds in a time zone
fn unix_leap_time_to_unix_time(&self, unix_leap_time: i64) -> Result<i64, Error> {
if unix_leap_time == i64::MIN {
return Err(Error::OutOfRange("out of range operation"));
}
let index = match self
.leap_seconds
.binary_search_by_key(&(unix_leap_time - 1), LeapSecond::unix_leap_time)
{
Ok(x) => x + 1,
Err(x) => x,
};
let correction = if index > 0 { self.leap_seconds[index - 1].correction } else { 0 };
match unix_leap_time.checked_sub(correction as i64) {
Some(unix_time) => Ok(unix_time),
None => Err(Error::OutOfRange("out of range operation")),
}
}
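// Worked example (sketch, using the leap-second list from the v1 test data below):
// Unix time 78_796_800 (1972-07-01) picks up a cumulative correction of +1, so its
// Unix leap time is 78_796_801; this function undoes that by subtracting the
// correction found via binary search.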
/// The UTC time zone
const UTC: TimeZoneRef<'static> = TimeZoneRef {
transitions: &[],
local_time_types: &[LocalTimeType::UTC],
leap_seconds: &[],
extra_rule: &None,
};
}
/// Transition of a TZif file
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(super) struct Transition {
/// Unix leap time
unix_leap_time: i64,
/// Index specifying the local time type of the transition
local_time_type_index: usize,
}
impl Transition {
/// Construct a TZif file transition
pub(super) const fn new(unix_leap_time: i64, local_time_type_index: usize) -> Self {
Self { unix_leap_time, local_time_type_index }
}
/// Returns Unix leap time
const fn unix_leap_time(&self) -> i64 {
self.unix_leap_time
}
}
/// Leap second of a TZif file
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(super) struct LeapSecond {
/// Unix leap time
unix_leap_time: i64,
/// Leap second correction
correction: i32,
}
impl LeapSecond {
/// Construct a TZif file leap second
pub(super) const fn new(unix_leap_time: i64, correction: i32) -> Self {
Self { unix_leap_time, correction }
}
/// Returns Unix leap time
const fn unix_leap_time(&self) -> i64 {
self.unix_leap_time
}
}
/// ASCII-encoded fixed-capacity string, used for storing time zone names
#[derive(Copy, Clone, Eq, PartialEq)]
struct TimeZoneName {
/// Length-prefixed string buffer
bytes: [u8; 8],
}
impl TimeZoneName {
/// Construct a time zone name
///
/// man tzfile(5):
/// Time zone designations should consist of at least three (3) and no more than six (6) ASCII
/// characters from the set of alphanumerics, “-”, and “+”. This is for compatibility with
/// POSIX requirements for time zone abbreviations.
fn new(input: &[u8]) -> Result<Self, Error> {
let len = input.len();
if !(3..=7).contains(&len) {
return Err(Error::LocalTimeType(
"time zone name must have between 3 and 7 characters",
));
}
let mut bytes = [0; 8];
bytes[0] = input.len() as u8;
let mut i = 0;
while i < len {
let b = input[i];
match b {
b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z' | b'+' | b'-' => {}
_ => return Err(Error::LocalTimeType("invalid characters in time zone name")),
}
bytes[i + 1] = b;
i += 1;
}
Ok(Self { bytes })
}
/// Returns time zone name as a byte slice
fn as_bytes(&self) -> &[u8] {
match self.bytes[0] {
3 => &self.bytes[1..4],
4 => &self.bytes[1..5],
5 => &self.bytes[1..6],
6 => &self.bytes[1..7],
7 => &self.bytes[1..8],
_ => unreachable!(),
}
}
/// Check if two time zone names are equal
fn equal(&self, other: &Self) -> bool {
self.bytes == other.bytes
}
}
impl AsRef<str> for TimeZoneName {
fn as_ref(&self) -> &str {
// SAFETY: ASCII is valid UTF-8
unsafe { str::from_utf8_unchecked(self.as_bytes()) }
}
}
impl fmt::Debug for TimeZoneName {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_ref().fmt(f)
}
}
/// Local time type associated to a time zone
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub(crate) struct LocalTimeType {
/// Offset from UTC in seconds
pub(super) ut_offset: i32,
/// Daylight Saving Time indicator
is_dst: bool,
/// Time zone name
name: Option<TimeZoneName>,
}
impl LocalTimeType {
/// Construct a local time type
pub(super) fn new(ut_offset: i32, is_dst: bool, name: Option<&[u8]>) -> Result<Self, Error> {
if ut_offset == i32::MIN {
return Err(Error::LocalTimeType("invalid UTC offset"));
}
let name = match name {
Some(name) => TimeZoneName::new(name)?,
None => return Ok(Self { ut_offset, is_dst, name: None }),
};
Ok(Self { ut_offset, is_dst, name: Some(name) })
}
/// Construct a local time type with the specified UTC offset in seconds
pub(super) const fn with_offset(ut_offset: i32) -> Result<Self, Error> {
if ut_offset == i32::MIN {
return Err(Error::LocalTimeType("invalid UTC offset"));
}
Ok(Self { ut_offset, is_dst: false, name: None })
}
/// Returns offset from UTC in seconds
pub(crate) const fn offset(&self) -> i32 {
self.ut_offset
}
/// Returns daylight saving time indicator
pub(super) const fn is_dst(&self) -> bool {
self.is_dst
}
pub(super) const UTC: LocalTimeType = Self { ut_offset: 0, is_dst: false, name: None };
}
/// Open the TZif file corresponding to a TZ string
fn find_tz_file(path: impl AsRef<Path>) -> Result<File, Error> {
// Don't check system timezone directories on non-UNIX platforms
#[cfg(not(unix))]
return Ok(File::open(path)?);
#[cfg(unix)]
{
let path = path.as_ref();
if path.is_absolute() {
return Ok(File::open(path)?);
}
for folder in &ZONE_INFO_DIRECTORIES {
if let Ok(file) = File::open(PathBuf::from(folder).join(path)) {
return Ok(file);
}
}
Err(Error::Io(io::ErrorKind::NotFound.into()))
}
}
// Possible system timezone directories
#[cfg(unix)]
const ZONE_INFO_DIRECTORIES: [&str; 4] =
["/usr/share/zoneinfo", "/share/zoneinfo", "/etc/zoneinfo", "/usr/share/lib/zoneinfo"];
/// Number of seconds in one week
pub(crate) const SECONDS_PER_WEEK: i64 = SECONDS_PER_DAY * DAYS_PER_WEEK;
/// Number of seconds in 28 days
const SECONDS_PER_28_DAYS: i64 = SECONDS_PER_DAY * 28;
#[cfg(test)]
mod tests {
use super::super::Error;
use super::{LeapSecond, LocalTimeType, TimeZone, TimeZoneName, Transition, TransitionRule};
#[test]
fn test_no_dst() -> Result<(), Error> {
let tz_string = b"HST10";
let transition_rule = TransitionRule::from_tz_string(tz_string, false)?;
assert_eq!(transition_rule, LocalTimeType::new(-36000, false, Some(b"HST"))?.into());
Ok(())
}
#[test]
fn test_error() -> Result<(), Error> {
assert!(matches!(
TransitionRule::from_tz_string(b"IST-1GMT0", false),
Err(Error::UnsupportedTzString(_))
));
assert!(matches!(
TransitionRule::from_tz_string(b"EET-2EEST", false),
Err(Error::UnsupportedTzString(_))
));
Ok(())
}
#[test]
fn test_v1_file_with_leap_seconds() -> Result<(), Error> {
let bytes = b"TZif\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\x01\0\0\0\x1b\0\0\0\0\0\0\0\x01\0\0\0\x04\0\0\0\0\0\0UTC\0\x04\xb2\x58\0\0\0\0\x01\x05\xa4\xec\x01\0\0\0\x02\x07\x86\x1f\x82\0\0\0\x03\x09\x67\x53\x03\0\0\0\x04\x0b\x48\x86\x84\0\0\0\x05\x0d\x2b\x0b\x85\0\0\0\x06\x0f\x0c\x3f\x06\0\0\0\x07\x10\xed\x72\x87\0\0\0\x08\x12\xce\xa6\x08\0\0\0\x09\x15\x9f\xca\x89\0\0\0\x0a\x17\x80\xfe\x0a\0\0\0\x0b\x19\x62\x31\x8b\0\0\0\x0c\x1d\x25\xea\x0c\0\0\0\x0d\x21\xda\xe5\x0d\0\0\0\x0e\x25\x9e\x9d\x8e\0\0\0\x0f\x27\x7f\xd1\x0f\0\0\0\x10\x2a\x50\xf5\x90\0\0\0\x11\x2c\x32\x29\x11\0\0\0\x12\x2e\x13\x5c\x92\0\0\0\x13\x30\xe7\x24\x13\0\0\0\x14\x33\xb8\x48\x94\0\0\0\x15\x36\x8c\x10\x15\0\0\0\x16\x43\xb7\x1b\x96\0\0\0\x17\x49\x5c\x07\x97\0\0\0\x18\x4f\xef\x93\x18\0\0\0\x19\x55\x93\x2d\x99\0\0\0\x1a\x58\x68\x46\x9a\0\0\0\x1b\0\0";
let time_zone = TimeZone::from_tz_data(bytes)?;
let time_zone_result = TimeZone::new(
Vec::new(),
vec![LocalTimeType::new(0, false, Some(b"UTC"))?],
vec![
LeapSecond::new(78796800, 1),
LeapSecond::new(94694401, 2),
LeapSecond::new(126230402, 3),
LeapSecond::new(157766403, 4),
LeapSecond::new(189302404, 5),
LeapSecond::new(220924805, 6),
LeapSecond::new(252460806, 7),
LeapSecond::new(283996807, 8),
LeapSecond::new(315532808, 9),
LeapSecond::new(362793609, 10),
LeapSecond::new(394329610, 11),
LeapSecond::new(425865611, 12),
LeapSecond::new(489024012, 13),
LeapSecond::new(567993613, 14),
LeapSecond::new(631152014, 15),
LeapSecond::new(662688015, 16),
LeapSecond::new(709948816, 17),
LeapSecond::new(741484817, 18),
LeapSecond::new(773020818, 19),
LeapSecond::new(820454419, 20),
LeapSecond::new(867715220, 21),
LeapSecond::new(915148821, 22),
LeapSecond::new(1136073622, 23),
LeapSecond::new(1230768023, 24),
LeapSecond::new(1341100824, 25),
LeapSecond::new(1435708825, 26),
LeapSecond::new(1483228826, 27),
],
None,
)?;
assert_eq!(time_zone, time_zone_result);
Ok(())
}
#[test]
fn test_v2_file() -> Result<(), Error> {
let bytes = b"TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x06\0\0\0\x06\0\0\0\0\0\0\0\x07\0\0\0\x06\0\0\0\x14\x80\0\0\0\xbb\x05\x43\x48\xbb\x21\x71\x58\xcb\x89\x3d\xc8\xd2\x23\xf4\x70\xd2\x61\x49\x38\xd5\x8d\x73\x48\x01\x02\x01\x03\x04\x01\x05\xff\xff\x6c\x02\0\0\xff\xff\x6c\x58\0\x04\xff\xff\x7a\x68\x01\x08\xff\xff\x7a\x68\x01\x0c\xff\xff\x7a\x68\x01\x10\xff\xff\x73\x60\0\x04LMT\0HST\0HDT\0HWT\0HPT\0\0\0\0\0\x01\0\0\0\0\0\x01\0TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x06\0\0\0\x06\0\0\0\0\0\0\0\x07\0\0\0\x06\0\0\0\x14\xff\xff\xff\xff\x74\xe0\x70\xbe\xff\xff\xff\xff\xbb\x05\x43\x48\xff\xff\xff\xff\xbb\x21\x71\x58\xff\xff\xff\xff\xcb\x89\x3d\xc8\xff\xff\xff\xff\xd2\x23\xf4\x70\xff\xff\xff\xff\xd2\x61\x49\x38\xff\xff\xff\xff\xd5\x8d\x73\x48\x01\x02\x01\x03\x04\x01\x05\xff\xff\x6c\x02\0\0\xff\xff\x6c\x58\0\x04\xff\xff\x7a\x68\x01\x08\xff\xff\x7a\x68\x01\x0c\xff\xff\x7a\x68\x01\x10\xff\xff\x73\x60\0\x04LMT\0HST\0HDT\0HWT\0HPT\0\0\0\0\0\x01\0\0\0\0\0\x01\0\x0aHST10\x0a";
let time_zone = TimeZone::from_tz_data(bytes)?;
let time_zone_result = TimeZone::new(
vec![
Transition::new(-2334101314, 1),
Transition::new(-1157283000, 2),
Transition::new(-1155436200, 1),
Transition::new(-880198200, 3),
Transition::new(-769395600, 4),
Transition::new(-765376200, 1),
Transition::new(-712150200, 5),
],
vec![
LocalTimeType::new(-37886, false, Some(b"LMT"))?,
LocalTimeType::new(-37800, false, Some(b"HST"))?,
LocalTimeType::new(-34200, true, Some(b"HDT"))?,
LocalTimeType::new(-34200, true, Some(b"HWT"))?,
LocalTimeType::new(-34200, true, Some(b"HPT"))?,
LocalTimeType::new(-36000, false, Some(b"HST"))?,
],
Vec::new(),
Some(TransitionRule::from(LocalTimeType::new(-36000, false, Some(b"HST"))?)),
)?;
assert_eq!(time_zone, time_zone_result);
assert_eq!(
*time_zone.find_local_time_type(-1156939200)?,
LocalTimeType::new(-34200, true, Some(b"HDT"))?
);
assert_eq!(
*time_zone.find_local_time_type(1546300800)?,
LocalTimeType::new(-36000, false, Some(b"HST"))?
);
Ok(())
}
#[test]
fn test_no_tz_string() -> Result<(), Error> {
// Guayaquil from macOS 10.11
let bytes = b"TZif\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x02\0\0\0\x02\0\0\0\0\0\0\0\x01\0\0\0\x02\0\0\0\x08\xb6\xa4B\x18\x01\xff\xff\xb6h\0\0\xff\xff\xb9\xb0\0\x04QMT\0ECT\0\0\0\0\0";
let time_zone = TimeZone::from_tz_data(bytes)?;
dbg!(&time_zone);
let time_zone_result = TimeZone::new(
vec![Transition::new(-1230749160, 1)],
vec![
LocalTimeType::new(-18840, false, Some(b"QMT"))?,
LocalTimeType::new(-18000, false, Some(b"ECT"))?,
],
Vec::new(),
None,
)?;
assert_eq!(time_zone, time_zone_result);
assert_eq!(
*time_zone.find_local_time_type(-1500000000)?,
LocalTimeType::new(-18840, false, Some(b"QMT"))?
);
assert_eq!(
*time_zone.find_local_time_type(0)?,
LocalTimeType::new(-18000, false, Some(b"ECT"))?
);
Ok(())
}
#[test]
fn test_tz_ascii_str() -> Result<(), Error> {
assert!(matches!(TimeZoneName::new(b""), Err(Error::LocalTimeType(_))));
assert!(matches!(TimeZoneName::new(b"A"), Err(Error::LocalTimeType(_))));
assert!(matches!(TimeZoneName::new(b"AB"), Err(Error::LocalTimeType(_))));
assert_eq!(TimeZoneName::new(b"CET")?.as_bytes(), b"CET");
assert_eq!(TimeZoneName::new(b"CHADT")?.as_bytes(), b"CHADT");
assert_eq!(TimeZoneName::new(b"abcdefg")?.as_bytes(), b"abcdefg");
assert_eq!(TimeZoneName::new(b"UTC+02")?.as_bytes(), b"UTC+02");
assert_eq!(TimeZoneName::new(b"-1230")?.as_bytes(), b"-1230");
assert!(matches!(TimeZoneName::new("0330".as_bytes()), Err(Error::LocalTimeType(_)))); // MINUS SIGN (U+2212)
assert!(matches!(TimeZoneName::new(b"\x00123"), Err(Error::LocalTimeType(_))));
assert!(matches!(TimeZoneName::new(b"12345678"), Err(Error::LocalTimeType(_))));
assert!(matches!(TimeZoneName::new(b"GMT\0\0\0"), Err(Error::LocalTimeType(_))));
Ok(())
}
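// A minimal additional sketch exercising the `LocalTimeType` constructors; the
// values used here (a CET-like +3600 offset) are illustrative only.
#[test]
fn test_local_time_type_sketch() -> Result<(), Error> {
    let cet = LocalTimeType::new(3600, false, Some(b"CET"))?;
    assert_eq!(cet.offset(), 3600);
    assert!(!cet.is_dst());
    // `i32::MIN` is rejected up front by both constructors (see the guards above).
    assert!(LocalTimeType::with_offset(i32::MIN).is_err());
    Ok(())
}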
#[test]
fn test_time_zone() -> Result<(), Error> {
let utc = LocalTimeType::UTC;
let cet = LocalTimeType::with_offset(3600)?;
let utc_local_time_types = vec![utc];
let fixed_extra_rule = TransitionRule::from(cet);
let time_zone_1 = TimeZone::new(vec![], utc_local_time_types.clone(), vec![], None)?;
let time_zone_2 =
TimeZone::new(vec![], utc_local_time_types.clone(), vec![], Some(fixed_extra_rule))?;
let time_zone_3 =
TimeZone::new(vec![Transition::new(0, 0)], utc_local_time_types.clone(), vec![], None)?;
let time_zone_4 = TimeZone::new(
vec![Transition::new(i32::MIN.into(), 0), Transition::new(0, 1)],
vec![utc, cet],
Vec::new(),
Some(fixed_extra_rule),
)?;
assert_eq!(*time_zone_1.find_local_time_type(0)?, utc);
assert_eq!(*time_zone_2.find_local_time_type(0)?, cet);
assert_eq!(*time_zone_3.find_local_time_type(-1)?, utc);
assert_eq!(*time_zone_3.find_local_time_type(0)?, utc);
assert_eq!(*time_zone_4.find_local_time_type(-1)?, utc);
assert_eq!(*time_zone_4.find_local_time_type(0)?, cet);
let time_zone_err = TimeZone::new(
vec![Transition::new(0, 0)],
utc_local_time_types,
vec![],
Some(fixed_extra_rule),
);
assert!(time_zone_err.is_err());
Ok(())
}
#[test]
fn test_time_zone_from_posix_tz() -> Result<(), Error> {
#[cfg(unix)]
{
// if the TZ var is set, this essentially _overrides_ the
// time set by the localtime symlink
// so just ensure that ::local() acts as expected
// in this case
if let Ok(tz) = std::env::var("TZ") {
let time_zone_local = TimeZone::local(Some(tz.as_str()))?;
let time_zone_local_1 = TimeZone::from_posix_tz(&tz)?;
assert_eq!(time_zone_local, time_zone_local_1);
}
// `TimeZone::from_posix_tz("UTC")` will return `Error` if the environment does not have
// a time zone database, like for example some docker containers.
// In that case skip the test.
if let Ok(time_zone_utc) = TimeZone::from_posix_tz("UTC") {
assert_eq!(time_zone_utc.find_local_time_type(0)?.offset(), 0);
}
}
assert!(TimeZone::from_posix_tz("EST5EDT,0/0,J365/25").is_err());
assert_eq!(TimeZone::from_posix_tz("").unwrap().find_local_time_type(0)?.offset(), 0);
Ok(())
}
#[test]
fn test_leap_seconds() -> Result<(), Error> {
let time_zone = TimeZone::new(
Vec::new(),
vec![LocalTimeType::new(0, false, Some(b"UTC"))?],
vec![
LeapSecond::new(78796800, 1),
LeapSecond::new(94694401, 2),
LeapSecond::new(126230402, 3),
LeapSecond::new(157766403, 4),
LeapSecond::new(189302404, 5),
LeapSecond::new(220924805, 6),
LeapSecond::new(252460806, 7),
LeapSecond::new(283996807, 8),
LeapSecond::new(315532808, 9),
LeapSecond::new(362793609, 10),
LeapSecond::new(394329610, 11),
LeapSecond::new(425865611, 12),
LeapSecond::new(489024012, 13),
LeapSecond::new(567993613, 14),
LeapSecond::new(631152014, 15),
LeapSecond::new(662688015, 16),
LeapSecond::new(709948816, 17),
LeapSecond::new(741484817, 18),
LeapSecond::new(773020818, 19),
LeapSecond::new(820454419, 20),
LeapSecond::new(867715220, 21),
LeapSecond::new(915148821, 22),
LeapSecond::new(1136073622, 23),
LeapSecond::new(1230768023, 24),
LeapSecond::new(1341100824, 25),
LeapSecond::new(1435708825, 26),
LeapSecond::new(1483228826, 27),
],
None,
)?;
let time_zone_ref = time_zone.as_ref();
assert!(matches!(time_zone_ref.unix_leap_time_to_unix_time(1136073621), Ok(1136073599)));
assert!(matches!(time_zone_ref.unix_leap_time_to_unix_time(1136073622), Ok(1136073600)));
assert!(matches!(time_zone_ref.unix_leap_time_to_unix_time(1136073623), Ok(1136073600)));
assert!(matches!(time_zone_ref.unix_leap_time_to_unix_time(1136073624), Ok(1136073601)));
assert!(matches!(time_zone_ref.unix_time_to_unix_leap_time(1136073599), Ok(1136073621)));
assert!(matches!(time_zone_ref.unix_time_to_unix_leap_time(1136073600), Ok(1136073623)));
assert!(matches!(time_zone_ref.unix_time_to_unix_leap_time(1136073601), Ok(1136073624)));
Ok(())
}
#[test]
fn test_leap_seconds_overflow() -> Result<(), Error> {
let time_zone_err = TimeZone::new(
vec![Transition::new(i64::MIN, 0)],
vec![LocalTimeType::UTC],
vec![LeapSecond::new(0, 1)],
Some(TransitionRule::from(LocalTimeType::UTC)),
);
assert!(time_zone_err.is_err());
let time_zone = TimeZone::new(
vec![Transition::new(i64::MAX, 0)],
vec![LocalTimeType::UTC],
vec![LeapSecond::new(0, 1)],
None,
)?;
assert!(matches!(
time_zone.find_local_time_type(i64::MAX),
Err(Error::FindLocalTimeType(_))
));
Ok(())
}
}

View File

@@ -0,0 +1,171 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::{cell::RefCell, collections::hash_map, env, fs, hash::Hasher, time::SystemTime};
use super::tz_info::TimeZone;
use super::{FixedOffset, NaiveDateTime};
use crate::MappedLocalTime;
pub(super) fn offset_from_utc_datetime(utc: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
offset(utc, false)
}
pub(super) fn offset_from_local_datetime(local: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
offset(local, true)
}
fn offset(d: &NaiveDateTime, local: bool) -> MappedLocalTime<FixedOffset> {
TZ_INFO.with(|maybe_cache| {
maybe_cache.borrow_mut().get_or_insert_with(Cache::default).offset(*d, local)
})
}
// we have to store the `Cache` in an option as it can't
// be initialized in a static context.
thread_local! {
static TZ_INFO: RefCell<Option<Cache>> = Default::default();
}
enum Source {
LocalTime { mtime: SystemTime },
Environment { hash: u64 },
}
impl Source {
fn new(env_tz: Option<&str>) -> Source {
match env_tz {
Some(tz) => {
let mut hasher = hash_map::DefaultHasher::new();
hasher.write(tz.as_bytes());
let hash = hasher.finish();
Source::Environment { hash }
}
None => match fs::symlink_metadata("/etc/localtime") {
Ok(data) => Source::LocalTime {
// we have to pick a sensible default when the mtime fails
// by picking SystemTime::now() we raise the probability of
// the cache being invalidated if/when the mtime starts working
mtime: data.modified().unwrap_or_else(|_| SystemTime::now()),
},
Err(_) => {
// as above, now() should be a better default than some constant
// TODO: see if we can improve caching in the case where the fallback is a valid timezone
Source::LocalTime { mtime: SystemTime::now() }
}
},
}
}
}
struct Cache {
zone: TimeZone,
source: Source,
last_checked: SystemTime,
}
#[cfg(target_os = "aix")]
const TZDB_LOCATION: &str = "/usr/share/lib/zoneinfo";
#[cfg(not(any(target_os = "android", target_os = "aix", target_env = "ohos")))]
const TZDB_LOCATION: &str = "/usr/share/zoneinfo";
fn fallback_timezone() -> Option<TimeZone> {
let tz_name = iana_time_zone::get_timezone().ok()?;
#[cfg(not(any(target_os = "android", target_env = "ohos")))]
let bytes = fs::read(format!("{TZDB_LOCATION}/{tz_name}")).ok()?;
#[cfg(any(target_os = "android", target_env = "ohos"))]
let bytes = crate::offset::local::tz_data::for_zone(&tz_name).ok()??;
TimeZone::from_tz_data(&bytes).ok()
}
impl Default for Cache {
fn default() -> Cache {
// default to UTC if no local timezone can be found
let env_tz = env::var("TZ").ok();
let env_ref = env_tz.as_deref();
Cache {
last_checked: SystemTime::now(),
source: Source::new(env_ref),
zone: current_zone(env_ref),
}
}
}
fn current_zone(var: Option<&str>) -> TimeZone {
TimeZone::local(var).ok().or_else(fallback_timezone).unwrap_or_else(TimeZone::utc)
}
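// Sketch of the fallback chain above: `TZ`/`/etc/localtime` via `TimeZone::local`,
// then the IANA name reported by `iana_time_zone`, and finally plain UTC if neither
// source yields a usable time zone.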
impl Cache {
fn offset(&mut self, d: NaiveDateTime, local: bool) -> MappedLocalTime<FixedOffset> {
let now = SystemTime::now();
match now.duration_since(self.last_checked) {
// If the cache has been around for less than a second then we reuse it
// unconditionally. This is a reasonable tradeoff because the timezone
// generally won't be changing _that_ often, but if the time zone does
// change, it will reflect sufficiently quickly from an application
// user's perspective.
Ok(d) if d.as_secs() < 1 => (),
Ok(_) | Err(_) => {
let env_tz = env::var("TZ").ok();
let env_ref = env_tz.as_deref();
let new_source = Source::new(env_ref);
let out_of_date = match (&self.source, &new_source) {
// change from env to file or file to env, must recreate the zone
(Source::Environment { .. }, Source::LocalTime { .. })
| (Source::LocalTime { .. }, Source::Environment { .. }) => true,
// stay as file, but mtime has changed
(Source::LocalTime { mtime: old_mtime }, Source::LocalTime { mtime })
if old_mtime != mtime =>
{
true
}
// stay as env, but hash of variable has changed
(Source::Environment { hash: old_hash }, Source::Environment { hash })
if old_hash != hash =>
{
true
}
// cache can be reused
_ => false,
};
if out_of_date {
self.zone = current_zone(env_ref);
}
self.last_checked = now;
self.source = new_source;
}
}
if !local {
let offset = self
.zone
.find_local_time_type(d.and_utc().timestamp())
.expect("unable to select local time type")
.offset();
return match FixedOffset::east_opt(offset) {
Some(offset) => MappedLocalTime::Single(offset),
None => MappedLocalTime::None,
};
}
// We pass the year through as-is: the year of a local point in time must either be valid in that
// time zone, or the entire time was skipped, in which case we return MappedLocalTime::None anyway.
self.zone
.find_local_time_type_from_local(d)
.expect("unable to select local time type")
.and_then(|o| FixedOffset::east_opt(o.offset()))
}
}
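// Rough picture of how this cache is used, assuming the usual call path through
// `Local`: a conversion calls `offset_from_utc_datetime` or
// `offset_from_local_datetime`, which borrows the thread-local `Cache`, refreshes
// it at most once per second by re-hashing `TZ` or re-reading the mtime of
// /etc/localtime, and then queries the cached `TimeZone` for the offset.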

View File

@@ -0,0 +1,59 @@
#![allow(non_snake_case, non_upper_case_globals, non_camel_case_types, dead_code, clippy::all)]
windows_link::link!("kernel32.dll" "system" fn GetTimeZoneInformationForYear(wyear : u16, pdtzi : *const DYNAMIC_TIME_ZONE_INFORMATION, ptzi : *mut TIME_ZONE_INFORMATION) -> BOOL);
windows_link::link!("kernel32.dll" "system" fn SystemTimeToFileTime(lpsystemtime : *const SYSTEMTIME, lpfiletime : *mut FILETIME) -> BOOL);
windows_link::link!("kernel32.dll" "system" fn SystemTimeToTzSpecificLocalTime(lptimezoneinformation : *const TIME_ZONE_INFORMATION, lpuniversaltime : *const SYSTEMTIME, lplocaltime : *mut SYSTEMTIME) -> BOOL);
windows_link::link!("kernel32.dll" "system" fn TzSpecificLocalTimeToSystemTime(lptimezoneinformation : *const TIME_ZONE_INFORMATION, lplocaltime : *const SYSTEMTIME, lpuniversaltime : *mut SYSTEMTIME) -> BOOL);
pub type BOOL = i32;
#[repr(C)]
#[derive(Clone, Copy)]
pub struct DYNAMIC_TIME_ZONE_INFORMATION {
pub Bias: i32,
pub StandardName: [u16; 32],
pub StandardDate: SYSTEMTIME,
pub StandardBias: i32,
pub DaylightName: [u16; 32],
pub DaylightDate: SYSTEMTIME,
pub DaylightBias: i32,
pub TimeZoneKeyName: [u16; 128],
pub DynamicDaylightTimeDisabled: bool,
}
impl Default for DYNAMIC_TIME_ZONE_INFORMATION {
fn default() -> Self {
unsafe { core::mem::zeroed() }
}
}
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct FILETIME {
pub dwLowDateTime: u32,
pub dwHighDateTime: u32,
}
#[repr(C)]
#[derive(Clone, Copy, Default)]
pub struct SYSTEMTIME {
pub wYear: u16,
pub wMonth: u16,
pub wDayOfWeek: u16,
pub wDay: u16,
pub wHour: u16,
pub wMinute: u16,
pub wSecond: u16,
pub wMilliseconds: u16,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct TIME_ZONE_INFORMATION {
pub Bias: i32,
pub StandardName: [u16; 32],
pub StandardDate: SYSTEMTIME,
pub StandardBias: i32,
pub DaylightName: [u16; 32],
pub DaylightDate: SYSTEMTIME,
pub DaylightBias: i32,
}
impl Default for TIME_ZONE_INFORMATION {
fn default() -> Self {
unsafe { core::mem::zeroed() }
}
}

View File

@@ -0,0 +1,7 @@
--out src/offset/local/win_bindings.rs
--flat --sys --no-comment
--filter
GetTimeZoneInformationForYear
SystemTimeToFileTime
SystemTimeToTzSpecificLocalTime
TzSpecificLocalTimeToSystemTime

View File

@@ -0,0 +1,293 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp::Ordering;
use std::mem::MaybeUninit;
use std::ptr;
use super::win_bindings::{GetTimeZoneInformationForYear, SYSTEMTIME, TIME_ZONE_INFORMATION};
use crate::offset::local::{Transition, lookup_with_dst_transitions};
use crate::{Datelike, FixedOffset, MappedLocalTime, NaiveDate, NaiveDateTime, NaiveTime, Weekday};
// We don't use `SystemTimeToTzSpecificLocalTime` because it doesn't support the same range of dates
// as Chrono. Also it really isn't that difficult to work out the correct offset from the provided
// DST rules.
//
// This method uses `overflowing_sub_offset` because it is no problem if the transition time in UTC
// falls a couple of hours inside the buffer space around the `NaiveDateTime` range (although it is
// very theoretical to have a transition at midnight around `NaiveDate::(MIN|MAX)`).
pub(super) fn offset_from_utc_datetime(utc: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
// Using a `TzInfo` based on the year of a UTC datetime is technically wrong; we should be
// using the rules for the year of the corresponding local time. But this matches what
// `SystemTimeToTzSpecificLocalTime` is documented to do.
let tz_info = match TzInfo::for_year(utc.year()) {
Some(tz_info) => tz_info,
None => return MappedLocalTime::None,
};
let offset = match (tz_info.std_transition, tz_info.dst_transition) {
(Some(std_transition), Some(dst_transition)) => {
let std_transition_utc = std_transition.overflowing_sub_offset(tz_info.dst_offset);
let dst_transition_utc = dst_transition.overflowing_sub_offset(tz_info.std_offset);
if dst_transition_utc < std_transition_utc {
match utc >= &dst_transition_utc && utc < &std_transition_utc {
true => tz_info.dst_offset,
false => tz_info.std_offset,
}
} else {
match utc >= &std_transition_utc && utc < &dst_transition_utc {
true => tz_info.std_offset,
false => tz_info.dst_offset,
}
}
}
(Some(std_transition), None) => {
let std_transition_utc = std_transition.overflowing_sub_offset(tz_info.dst_offset);
match utc < &std_transition_utc {
true => tz_info.dst_offset,
false => tz_info.std_offset,
}
}
(None, Some(dst_transition)) => {
let dst_transition_utc = dst_transition.overflowing_sub_offset(tz_info.std_offset);
match utc < &dst_transition_utc {
true => tz_info.std_offset,
false => tz_info.dst_offset,
}
}
(None, None) => tz_info.std_offset,
};
MappedLocalTime::Single(offset)
}
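// Worked example with illustrative values (a US-Eastern-like zone, std = UTC-5,
// dst = UTC-4): `dst_transition` is the second Sunday of March 02:00 given in
// standard time, `std_transition` the first Sunday of November 02:00 given in
// daylight time. Converted to UTC these become 07:00 and 06:00 respectively, and
// any UTC instant between them gets the DST offset, everything else the standard offset.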
// We don't use `TzSpecificLocalTimeToSystemTime` because it doesn't let us choose how to handle
// ambiguous cases (during a DST transition). Instead we get the timezone information for the
// current year and compute it ourselves, like we do on Unix.
pub(super) fn offset_from_local_datetime(local: &NaiveDateTime) -> MappedLocalTime<FixedOffset> {
let tz_info = match TzInfo::for_year(local.year()) {
Some(tz_info) => tz_info,
None => return MappedLocalTime::None,
};
// Create a sorted slice of transitions and use `lookup_with_dst_transitions`.
match (tz_info.std_transition, tz_info.dst_transition) {
(Some(std_transition), Some(dst_transition)) => {
let std_transition =
Transition::new(std_transition, tz_info.dst_offset, tz_info.std_offset);
let dst_transition =
Transition::new(dst_transition, tz_info.std_offset, tz_info.dst_offset);
let transitions = match std_transition.cmp(&dst_transition) {
Ordering::Less => [std_transition, dst_transition],
Ordering::Greater => [dst_transition, std_transition],
Ordering::Equal => {
// This doesn't make sense. Let's just return the standard offset.
return MappedLocalTime::Single(tz_info.std_offset);
}
};
lookup_with_dst_transitions(&transitions, *local)
}
(Some(std_transition), None) => {
let transitions =
[Transition::new(std_transition, tz_info.dst_offset, tz_info.std_offset)];
lookup_with_dst_transitions(&transitions, *local)
}
(None, Some(dst_transition)) => {
let transitions =
[Transition::new(dst_transition, tz_info.std_offset, tz_info.dst_offset)];
lookup_with_dst_transitions(&transitions, *local)
}
(None, None) => MappedLocalTime::Single(tz_info.std_offset),
}
}
// The basis for Windows timezone and DST support has been in place since Windows 2000. It does not
// allow for complex rules like the IANA timezone database:
// - A timezone has the same base offset the whole year.
// - There seem to be either zero or two DST transitions (but we support having just one).
// - As of Vista(?) only years from 2004 until a few years into the future are supported.
// - All other years get the base settings, which seem to be that of the current year.
//
// These details don't matter much, we just work with the offsets and transition dates Windows
// returns through `GetTimeZoneInformationForYear` for a particular year.
struct TzInfo {
// Offset from UTC during standard time.
std_offset: FixedOffset,
// Offset from UTC during daylight saving time.
dst_offset: FixedOffset,
// Transition from daylight saving time to standard time, given in local daylight saving time.
std_transition: Option<NaiveDateTime>,
// Transition from standard time to daylight saving time, given in local standard time.
dst_transition: Option<NaiveDateTime>,
}
impl TzInfo {
fn for_year(year: i32) -> Option<TzInfo> {
// The API limits years to 1601..=30827.
// Working with timezones and daylight saving time this far into the past or future makes
// little sense. But whatever is extrapolated for 1601 or 30827 is what can be extrapolated
// for years beyond.
let ref_year = year.clamp(1601, 30827) as u16;
let tz_info = unsafe {
let mut tz_info = MaybeUninit::<TIME_ZONE_INFORMATION>::uninit();
if GetTimeZoneInformationForYear(ref_year, ptr::null_mut(), tz_info.as_mut_ptr()) == 0 {
return None;
}
tz_info.assume_init()
};
let std_offset = (tz_info.Bias)
.checked_add(tz_info.StandardBias)
.and_then(|o| o.checked_mul(60))
.and_then(FixedOffset::west_opt)?;
let dst_offset = (tz_info.Bias)
.checked_add(tz_info.DaylightBias)
.and_then(|o| o.checked_mul(60))
.and_then(FixedOffset::west_opt)?;
Some(TzInfo {
std_offset,
dst_offset,
std_transition: naive_date_time_from_system_time(tz_info.StandardDate, year).ok()?,
dst_transition: naive_date_time_from_system_time(tz_info.DaylightDate, year).ok()?,
})
}
}
/// Resolve a `SYSTEMTIME` object to an `Option<NaiveDateTime>`.
///
/// A `SYSTEMTIME` within a `TIME_ZONE_INFORMATION` struct can be zero to indicate there is no
/// transition.
/// If it has year, month and day values it is a concrete date.
/// If the year is missing the `SYSTEMTIME` is a rule, which this method resolves for the provided
/// year. A rule has a month, weekday, and nth weekday of the month as components.
///
/// Returns `Err` if any of the values is invalid, which should never happen.
fn naive_date_time_from_system_time(
st: SYSTEMTIME,
year: i32,
) -> Result<Option<NaiveDateTime>, ()> {
if st.wYear == 0 && st.wMonth == 0 {
return Ok(None);
}
let time = NaiveTime::from_hms_milli_opt(
st.wHour as u32,
st.wMinute as u32,
st.wSecond as u32,
st.wMilliseconds as u32,
)
.ok_or(())?;
if st.wYear != 0 {
// We have a concrete date.
let date =
NaiveDate::from_ymd_opt(st.wYear as i32, st.wMonth as u32, st.wDay as u32).ok_or(())?;
return Ok(Some(date.and_time(time)));
}
// Resolve a rule with month, weekday, and nth weekday of the month to a date in the current
// year.
let weekday = match st.wDayOfWeek {
0 => Weekday::Sun,
1 => Weekday::Mon,
2 => Weekday::Tue,
3 => Weekday::Wed,
4 => Weekday::Thu,
5 => Weekday::Fri,
6 => Weekday::Sat,
_ => return Err(()),
};
let nth_day = match st.wDay {
1..=5 => st.wDay as u8,
_ => return Err(()),
};
let date = NaiveDate::from_weekday_of_month_opt(year, st.wMonth as u32, weekday, nth_day)
.or_else(|| NaiveDate::from_weekday_of_month_opt(year, st.wMonth as u32, weekday, 4))
.ok_or(())?; // `st.wMonth` must be invalid
Ok(Some(date.and_time(time)))
}
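// Example of the rule resolution above (illustrative values): a SYSTEMTIME with
// wYear = 0, wMonth = 11, wDayOfWeek = 0 (Sunday), wDay = 1 and wHour = 2 resolves
// to "first Sunday of November, 02:00" of the requested year. A wDay of 5 means
// "fifth occurrence, or the last one if the month only has four", which is what the
// fallback to the fourth occurrence implements.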
#[cfg(test)]
mod tests {
use crate::offset::local::win_bindings::{
FILETIME, SYSTEMTIME, SystemTimeToFileTime, TzSpecificLocalTimeToSystemTime,
};
use crate::{DateTime, FixedOffset, Local, NaiveDate, NaiveDateTime, TimeDelta};
use crate::{Datelike, TimeZone, Timelike};
use std::mem::MaybeUninit;
use std::ptr;
#[test]
fn verify_against_tz_specific_local_time_to_system_time() {
// The implementation in Windows itself is the source of truth on how to work with the OS
// timezone information. This test compares for every hour over a period of 125 years our
// implementation to `TzSpecificLocalTimeToSystemTime`.
//
// This uses parts of a previous Windows `Local` implementation in chrono.
fn from_local_time(dt: &NaiveDateTime) -> DateTime<Local> {
let st = system_time_from_naive_date_time(dt);
let utc_time = local_to_utc_time(&st);
let utc_secs = system_time_as_unix_seconds(&utc_time);
let local_secs = system_time_as_unix_seconds(&st);
let offset = (local_secs - utc_secs) as i32;
let offset = FixedOffset::east_opt(offset).unwrap();
DateTime::from_naive_utc_and_offset(*dt - offset, offset)
}
fn system_time_from_naive_date_time(dt: &NaiveDateTime) -> SYSTEMTIME {
SYSTEMTIME {
// Valid values: 1601-30827
wYear: dt.year() as u16,
// Valid values: 1-12
wMonth: dt.month() as u16,
// Valid values: 0-6, starting Sunday.
// NOTE: enum returns 1-7, starting Monday, so we are
// off here, but this is not currently used in local.
wDayOfWeek: dt.weekday() as u16,
// Valid values: 1-31
wDay: dt.day() as u16,
// Valid values: 0-23
wHour: dt.hour() as u16,
// Valid values: 0-59
wMinute: dt.minute() as u16,
// Valid values: 0-59
wSecond: dt.second() as u16,
// Valid values: 0-999
wMilliseconds: 0,
}
}
fn local_to_utc_time(local: &SYSTEMTIME) -> SYSTEMTIME {
let mut sys_time = MaybeUninit::<SYSTEMTIME>::uninit();
unsafe { TzSpecificLocalTimeToSystemTime(ptr::null(), local, sys_time.as_mut_ptr()) };
// SAFETY: TzSpecificLocalTimeToSystemTime must have succeeded at this point, so we can
// assume the value is initialized.
unsafe { sys_time.assume_init() }
}
const HECTONANOSECS_IN_SEC: i64 = 10_000_000;
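// 11_644_473_600 s is the span from the Windows FILETIME epoch (1601-01-01)
// to the Unix epoch (1970-01-01): 369 years containing 89 leap days,
// i.e. (369 * 365 + 89) * 86_400 seconds.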
const HECTONANOSEC_TO_UNIX_EPOCH: i64 = 11_644_473_600 * HECTONANOSECS_IN_SEC;
fn system_time_as_unix_seconds(st: &SYSTEMTIME) -> i64 {
let mut init = MaybeUninit::<FILETIME>::uninit();
unsafe {
SystemTimeToFileTime(st, init.as_mut_ptr());
}
// SystemTimeToFileTime must have succeeded at this point, so we can assume the value is
// initialized.
let filetime = unsafe { init.assume_init() };
let bit_shift =
((filetime.dwHighDateTime as u64) << 32) | (filetime.dwLowDateTime as u64);
(bit_shift as i64 - HECTONANOSEC_TO_UNIX_EPOCH) / HECTONANOSECS_IN_SEC
}
let mut date = NaiveDate::from_ymd_opt(1975, 1, 1).unwrap().and_hms_opt(0, 30, 0).unwrap();
while date.year() < 2078 {
// Windows doesn't handle non-existing dates, it just treats it as valid.
if let Some(our_result) = Local.from_local_datetime(&date).earliest() {
assert_eq!(from_local_time(&date), our_result);
}
date += TimeDelta::try_hours(1).unwrap();
}
}
}

View File

@@ -0,0 +1,715 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! The time zone, which calculates offsets from the local time to UTC.
//!
//! There are four operations provided by the `TimeZone` trait:
//!
//! 1. Converting the local `NaiveDateTime` to `DateTime<Tz>`
//! 2. Converting the UTC `NaiveDateTime` to `DateTime<Tz>`
//! 3. Converting `DateTime<Tz>` to the local `NaiveDateTime`
//! 4. Constructing `DateTime<Tz>` objects from various offsets
//!
//! 1 is used for constructors. 2 is used for the `with_timezone` method of date and time types.
//! 3 is used for other methods, e.g. `year()` or `format()`, and provided by an associated type
//! which implements `Offset` (which is then passed to `TimeZone` for actual implementations).
//! Technically speaking, `TimeZone` has total knowledge about a given timescale,
//! but `Offset` is used as a cache to avoid repeated conversions
//! and provides implementations for 1 and 3.
//! A `TimeZone` instance can be reconstructed from the corresponding `Offset` instance.
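// A minimal sketch of operations 1-3 above using `Utc`, whose offset is fixed so
// every mapping is `Single`:
//
//     use chrono::{NaiveDate, TimeZone, Utc};
//
//     let naive = NaiveDate::from_ymd_opt(2015, 5, 15).unwrap().and_hms_opt(0, 0, 0).unwrap();
//     let dt = Utc.from_local_datetime(&naive).unwrap(); // 1: local NaiveDateTime -> DateTime<Tz>
//     let dt2 = Utc.from_utc_datetime(&naive);           // 2: UTC NaiveDateTime -> DateTime<Tz>
//     assert_eq!(dt2.naive_local(), naive);              // 3: DateTime<Tz> -> local NaiveDateTime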
use core::fmt;
use crate::Weekday;
use crate::format::{ParseResult, Parsed, StrftimeItems, parse};
use crate::naive::{NaiveDate, NaiveDateTime, NaiveTime};
#[allow(deprecated)]
use crate::{Date, DateTime};
pub(crate) mod fixed;
pub use self::fixed::FixedOffset;
#[cfg(feature = "clock")]
pub(crate) mod local;
#[cfg(feature = "clock")]
pub use self::local::Local;
pub(crate) mod utc;
pub use self::utc::Utc;
/// The result of mapping a local time to a concrete instant in a given time zone.
///
/// The calculation to go from a local time (wall clock time) to an instant in UTC can end up in
/// three cases:
/// * A single, simple result.
/// * An ambiguous result when the clock is turned backwards during a transition due to for example
/// DST.
/// * No result when the clock is turned forwards during a transition due to for example DST.
///
/// <div class="warning">
///
/// In wasm, when using [`Local`], only the [`LocalResult::Single`] variant is returned.
/// Specifically:
///
/// * When the clock is turned backwards, where `Ambiguous(earliest, latest)` would be expected,
/// `Single(earliest)` is returned instead.
/// * When the clock is turned forwards, where `None` would be expected, `Single(t)` is returned,
/// with `t` being the requested local time represented as though there is no transition on that
/// day (i.e. still "summer time")
///
/// This is caused because of limitations in the JavaScript
/// [`Date`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date)
/// API, which always parses a local time as a single, valid time - even for an
/// input which describes a nonexistent or ambiguous time.
///
/// See further discussion and workarounds in <https://github.com/chronotope/chrono/issues/1701>.
///
/// </div>
///
/// When the clock is turned backwards it creates a _fold_ in local time, during which the local
/// time is _ambiguous_. When the clock is turned forwards it creates a _gap_ in local time, during
/// which the local time is _missing_, or does not exist.
///
/// Chrono does not return a default choice or invalid data during time zone transitions, but has
/// the `MappedLocalTime` type to help deal with the result correctly.
///
/// The type of `T` is usually a [`DateTime`] but may also be only an offset.
pub type MappedLocalTime<T> = LocalResult<T>;
#[derive(Clone, PartialEq, Debug, Copy, Eq, Hash)]
/// Old name of [`MappedLocalTime`]. See that type for more documentation.
pub enum LocalResult<T> {
/// The local time maps to a single unique result.
Single(T),
/// The local time is _ambiguous_ because there is a _fold_ in the local time.
///
/// This variant contains the two possible results, in the order `(earliest, latest)`.
Ambiguous(T, T),
/// The local time does not exist because there is a _gap_ in the local time.
///
/// This variant may also be returned if there was an error while resolving the local time,
/// caused by for example missing time zone data files, an error in an OS API, or overflow.
None,
}
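// A minimal sketch of handling each variant explicitly instead of calling `unwrap()`;
// the date below is only illustrative (it falls in a typical European DST fold):
//
//     use chrono::{Local, LocalResult, NaiveDate, TimeZone};
//
//     let naive = NaiveDate::from_ymd_opt(2024, 10, 27).unwrap().and_hms_opt(2, 30, 0).unwrap();
//     let resolved = match Local.from_local_datetime(&naive) {
//         LocalResult::Single(dt) => Some(dt),
//         LocalResult::Ambiguous(earliest, _latest) => Some(earliest),
//         LocalResult::None => None, // the wall-clock time was skipped
//     };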
impl<T> MappedLocalTime<T> {
/// Returns `Some` if the time zone mapping has a single result.
///
/// # Errors
///
/// Returns `None` if local time falls in a _fold_ or _gap_ in the local time, or if there was
/// an error.
#[must_use]
pub fn single(self) -> Option<T> {
match self {
MappedLocalTime::Single(t) => Some(t),
_ => None,
}
}
/// Returns the earliest possible result of the time zone mapping.
///
/// # Errors
///
/// Returns `None` if local time falls in a _gap_ in the local time, or if there was an error.
#[must_use]
pub fn earliest(self) -> Option<T> {
match self {
MappedLocalTime::Single(t) | MappedLocalTime::Ambiguous(t, _) => Some(t),
_ => None,
}
}
/// Returns the latest possible result of the time zone mapping.
///
/// # Errors
///
/// Returns `None` if local time falls in a _gap_ in the local time, or if there was an error.
#[must_use]
pub fn latest(self) -> Option<T> {
match self {
MappedLocalTime::Single(t) | MappedLocalTime::Ambiguous(_, t) => Some(t),
_ => None,
}
}
/// Maps a `MappedLocalTime<T>` into `MappedLocalTime<U>` with given function.
#[must_use]
pub fn map<U, F: FnMut(T) -> U>(self, mut f: F) -> MappedLocalTime<U> {
match self {
MappedLocalTime::None => MappedLocalTime::None,
MappedLocalTime::Single(v) => MappedLocalTime::Single(f(v)),
MappedLocalTime::Ambiguous(min, max) => MappedLocalTime::Ambiguous(f(min), f(max)),
}
}
/// Maps a `MappedLocalTime<T>` into `MappedLocalTime<U>` with given function.
///
/// Returns `MappedLocalTime::None` if the function returns `None`.
#[must_use]
pub(crate) fn and_then<U, F: FnMut(T) -> Option<U>>(self, mut f: F) -> MappedLocalTime<U> {
match self {
MappedLocalTime::None => MappedLocalTime::None,
MappedLocalTime::Single(v) => match f(v) {
Some(new) => MappedLocalTime::Single(new),
None => MappedLocalTime::None,
},
MappedLocalTime::Ambiguous(min, max) => match (f(min), f(max)) {
(Some(min), Some(max)) => MappedLocalTime::Ambiguous(min, max),
_ => MappedLocalTime::None,
},
}
}
}
#[allow(deprecated)]
impl<Tz: TimeZone> MappedLocalTime<Date<Tz>> {
/// Makes a new `DateTime` from the current date and given `NaiveTime`.
/// The offset in the current date is preserved.
///
/// Propagates any error. Ambiguous result would be discarded.
#[inline]
#[must_use]
pub fn and_time(self, time: NaiveTime) -> MappedLocalTime<DateTime<Tz>> {
match self {
MappedLocalTime::Single(d) => {
d.and_time(time).map_or(MappedLocalTime::None, MappedLocalTime::Single)
}
_ => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the current date, hour, minute and second.
/// The offset in the current date is preserved.
///
/// Propagates any error. Ambiguous result would be discarded.
#[inline]
#[must_use]
pub fn and_hms_opt(self, hour: u32, min: u32, sec: u32) -> MappedLocalTime<DateTime<Tz>> {
match self {
MappedLocalTime::Single(d) => {
d.and_hms_opt(hour, min, sec).map_or(MappedLocalTime::None, MappedLocalTime::Single)
}
_ => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the current date, hour, minute, second and millisecond.
/// The millisecond part can exceed 1,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Propagates any error. Ambiguous result would be discarded.
#[inline]
#[must_use]
pub fn and_hms_milli_opt(
self,
hour: u32,
min: u32,
sec: u32,
milli: u32,
) -> MappedLocalTime<DateTime<Tz>> {
match self {
MappedLocalTime::Single(d) => d
.and_hms_milli_opt(hour, min, sec, milli)
.map_or(MappedLocalTime::None, MappedLocalTime::Single),
_ => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the current date, hour, minute, second and microsecond.
/// The microsecond part can exceed 1,000,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Propagates any error. Ambiguous result would be discarded.
#[inline]
#[must_use]
pub fn and_hms_micro_opt(
self,
hour: u32,
min: u32,
sec: u32,
micro: u32,
) -> MappedLocalTime<DateTime<Tz>> {
match self {
MappedLocalTime::Single(d) => d
.and_hms_micro_opt(hour, min, sec, micro)
.map_or(MappedLocalTime::None, MappedLocalTime::Single),
_ => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the current date, hour, minute, second and nanosecond.
/// The nanosecond part can exceed 1,000,000,000 in order to represent the leap second.
/// The offset in the current date is preserved.
///
/// Propagates any error. Ambiguous result would be discarded.
#[inline]
#[must_use]
pub fn and_hms_nano_opt(
self,
hour: u32,
min: u32,
sec: u32,
nano: u32,
) -> MappedLocalTime<DateTime<Tz>> {
match self {
MappedLocalTime::Single(d) => d
.and_hms_nano_opt(hour, min, sec, nano)
.map_or(MappedLocalTime::None, MappedLocalTime::Single),
_ => MappedLocalTime::None,
}
}
}
impl<T: fmt::Debug> MappedLocalTime<T> {
/// Returns a single unique conversion result or panics.
///
/// `unwrap()` is best combined with time zone types where the mapping can never fail like
/// [`Utc`] and [`FixedOffset`]. Note that for [`FixedOffset`] there is a rare case where a
/// resulting [`DateTime`] can be out of range.
///
/// # Panics
///
/// Panics if the local time falls within a _fold_ or a _gap_ in the local time, and on any
/// error that may have been returned by the type implementing [`TimeZone`].
#[must_use]
#[track_caller]
pub fn unwrap(self) -> T {
match self {
MappedLocalTime::None => panic!("No such local time"),
MappedLocalTime::Single(t) => t,
MappedLocalTime::Ambiguous(t1, t2) => {
panic!("Ambiguous local time, ranging from {t1:?} to {t2:?}")
}
}
}
}
/// The offset from the local time to UTC.
pub trait Offset: Sized + Clone + fmt::Debug {
/// Returns the fixed offset from UTC to the local time stored.
fn fix(&self) -> FixedOffset;
}
/// The time zone.
///
/// The methods here are the primary constructors for the [`DateTime`] type.
pub trait TimeZone: Sized + Clone {
/// An associated offset type.
/// This type is used to store the actual offset in date and time types.
/// The original `TimeZone` value can be recovered via `TimeZone::from_offset`.
type Offset: Offset;
/// Make a new `DateTime` from year, month, day, time components and current time zone.
///
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
///
/// Returns `MappedLocalTime::None` on invalid input data.
fn with_ymd_and_hms(
&self,
year: i32,
month: u32,
day: u32,
hour: u32,
min: u32,
sec: u32,
) -> MappedLocalTime<DateTime<Self>> {
match NaiveDate::from_ymd_opt(year, month, day).and_then(|d| d.and_hms_opt(hour, min, sec))
{
Some(dt) => self.from_local_datetime(&dt),
None => MappedLocalTime::None,
}
}
/// Makes a new `Date` from year, month, day and the current time zone.
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
///
/// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),
/// but it will propagate to the `DateTime` values constructed via this date.
///
/// Panics on the out-of-range date, invalid month and/or day.
#[deprecated(since = "0.4.23", note = "use `with_ymd_and_hms()` instead")]
#[allow(deprecated)]
fn ymd(&self, year: i32, month: u32, day: u32) -> Date<Self> {
self.ymd_opt(year, month, day).unwrap()
}
/// Makes a new `Date` from year, month, day and the current time zone.
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
///
/// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),
/// but it will propagate to the `DateTime` values constructed via this date.
///
/// Returns `None` on the out-of-range date, invalid month and/or day.
#[deprecated(since = "0.4.23", note = "use `with_ymd_and_hms()` instead")]
#[allow(deprecated)]
fn ymd_opt(&self, year: i32, month: u32, day: u32) -> MappedLocalTime<Date<Self>> {
match NaiveDate::from_ymd_opt(year, month, day) {
Some(d) => self.from_local_date(&d),
None => MappedLocalTime::None,
}
}
/// Makes a new `Date` from year, day of year (DOY or "ordinal") and the current time zone.
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
///
/// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),
/// but it will propagate to the `DateTime` values constructed via this date.
///
/// Panics on the out-of-range date and/or invalid DOY.
#[deprecated(
since = "0.4.23",
note = "use `from_local_datetime()` with a `NaiveDateTime` instead"
)]
#[allow(deprecated)]
fn yo(&self, year: i32, ordinal: u32) -> Date<Self> {
self.yo_opt(year, ordinal).unwrap()
}
/// Makes a new `Date` from year, day of year (DOY or "ordinal") and the current time zone.
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
///
/// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),
/// but it will propagate to the `DateTime` values constructed via this date.
///
/// Returns `None` on the out-of-range date and/or invalid DOY.
#[deprecated(
since = "0.4.23",
note = "use `from_local_datetime()` with a `NaiveDateTime` instead"
)]
#[allow(deprecated)]
fn yo_opt(&self, year: i32, ordinal: u32) -> MappedLocalTime<Date<Self>> {
match NaiveDate::from_yo_opt(year, ordinal) {
Some(d) => self.from_local_date(&d),
None => MappedLocalTime::None,
}
}
/// Makes a new `Date` from ISO week date (year and week number), day of the week (DOW) and
/// the current time zone.
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
/// The resulting `Date` may have a different year from the input year.
///
/// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),
/// but it will propagate to the `DateTime` values constructed via this date.
///
/// Panics on the out-of-range date and/or invalid week number.
#[deprecated(
since = "0.4.23",
note = "use `from_local_datetime()` with a `NaiveDateTime` instead"
)]
#[allow(deprecated)]
fn isoywd(&self, year: i32, week: u32, weekday: Weekday) -> Date<Self> {
self.isoywd_opt(year, week, weekday).unwrap()
}
/// Makes a new `Date` from ISO week date (year and week number), day of the week (DOW) and
/// the current time zone.
/// This assumes the proleptic Gregorian calendar, with the year 0 being 1 BCE.
/// The resulting `Date` may have a different year from the input year.
///
/// The time zone normally does not affect the date (unless it is between UTC-24 and UTC+24),
/// but it will propagate to the `DateTime` values constructed via this date.
///
/// Returns `None` on the out-of-range date and/or invalid week number.
#[deprecated(
since = "0.4.23",
note = "use `from_local_datetime()` with a `NaiveDateTime` instead"
)]
#[allow(deprecated)]
fn isoywd_opt(&self, year: i32, week: u32, weekday: Weekday) -> MappedLocalTime<Date<Self>> {
match NaiveDate::from_isoywd_opt(year, week, weekday) {
Some(d) => self.from_local_date(&d),
None => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the number of non-leap seconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp")
/// and the number of nanoseconds since the last whole non-leap second.
///
/// The nanosecond part can exceed 1,000,000,000 in order to represent a
/// [leap second](crate::NaiveTime#leap-second-handling), but only when `secs % 60 == 59`.
/// (The true "UNIX timestamp" cannot represent a leap second unambiguously.)
///
/// # Panics
///
/// Panics on the out-of-range number of seconds and/or invalid nanosecond,
/// for a non-panicking version see [`timestamp_opt`](#method.timestamp_opt).
#[deprecated(since = "0.4.23", note = "use `timestamp_opt()` instead")]
fn timestamp(&self, secs: i64, nsecs: u32) -> DateTime<Self> {
self.timestamp_opt(secs, nsecs).unwrap()
}
/// Makes a new `DateTime` from the number of non-leap seconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp")
/// and the number of nanoseconds since the last whole non-leap second.
///
/// The nanosecond part can exceed 1,000,000,000 in order to represent a
/// [leap second](crate::NaiveTime#leap-second-handling), but only when `secs % 60 == 59`.
/// (The true "UNIX timestamp" cannot represent a leap second unambiguously.)
///
/// # Errors
///
/// Returns `MappedLocalTime::None` on out-of-range number of seconds and/or
/// invalid nanosecond, otherwise always returns `MappedLocalTime::Single`.
///
/// # Example
///
/// ```
/// use chrono::{TimeZone, Utc};
///
/// assert_eq!(Utc.timestamp_opt(1431648000, 0).unwrap().to_string(), "2015-05-15 00:00:00 UTC");
/// ```
fn timestamp_opt(&self, secs: i64, nsecs: u32) -> MappedLocalTime<DateTime<Self>> {
match DateTime::from_timestamp(secs, nsecs) {
Some(dt) => MappedLocalTime::Single(self.from_utc_datetime(&dt.naive_utc())),
None => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the number of non-leap milliseconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp").
///
/// Panics on an out-of-range number of milliseconds; for a non-panicking
/// version see [`timestamp_millis_opt`](#method.timestamp_millis_opt).
#[deprecated(since = "0.4.23", note = "use `timestamp_millis_opt()` instead")]
fn timestamp_millis(&self, millis: i64) -> DateTime<Self> {
self.timestamp_millis_opt(millis).unwrap()
}
/// Makes a new `DateTime` from the number of non-leap milliseconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp").
///
/// # Errors
///
/// Returns `MappedLocalTime::None` on out-of-range number of milliseconds
/// and/or invalid nanosecond, otherwise always returns
/// `MappedLocalTime::Single`.
///
/// # Example
///
/// ```
/// use chrono::{MappedLocalTime, TimeZone, Utc};
/// match Utc.timestamp_millis_opt(1431648000) {
/// MappedLocalTime::Single(dt) => assert_eq!(dt.timestamp(), 1431648),
/// _ => panic!("Incorrect timestamp_millis"),
/// };
/// ```
fn timestamp_millis_opt(&self, millis: i64) -> MappedLocalTime<DateTime<Self>> {
match DateTime::from_timestamp_millis(millis) {
Some(dt) => MappedLocalTime::Single(self.from_utc_datetime(&dt.naive_utc())),
None => MappedLocalTime::None,
}
}
/// Makes a new `DateTime` from the number of non-leap nanoseconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp").
///
/// Unlike [`timestamp_millis_opt`](#method.timestamp_millis_opt), this never fails.
///
/// # Example
///
/// ```
/// use chrono::{TimeZone, Utc};
///
/// assert_eq!(Utc.timestamp_nanos(1431648000000000).timestamp(), 1431648);
/// ```
fn timestamp_nanos(&self, nanos: i64) -> DateTime<Self> {
self.from_utc_datetime(&DateTime::from_timestamp_nanos(nanos).naive_utc())
}
/// Makes a new `DateTime` from the number of non-leap microseconds
/// since January 1, 1970 0:00:00 UTC (aka "UNIX timestamp").
///
/// # Example
///
/// ```
/// use chrono::{TimeZone, Utc};
///
/// assert_eq!(Utc.timestamp_micros(1431648000000).unwrap().timestamp(), 1431648);
/// ```
fn timestamp_micros(&self, micros: i64) -> MappedLocalTime<DateTime<Self>> {
match DateTime::from_timestamp_micros(micros) {
Some(dt) => MappedLocalTime::Single(self.from_utc_datetime(&dt.naive_utc())),
None => MappedLocalTime::None,
}
}
/// Parses a string with the specified format string and returns a
/// `DateTime` with the current offset.
///
/// See the [`crate::format::strftime`] module on the
/// supported escape sequences.
///
/// If the to-be-parsed string includes an offset, it *must* match the
/// offset of the TimeZone, otherwise an error will be returned.
///
/// See also [`DateTime::parse_from_str`] which gives a [`DateTime`] with
/// parsed [`FixedOffset`].
///
/// See also [`NaiveDateTime::parse_from_str`] which gives a [`NaiveDateTime`] without
/// an offset, but can be converted to a [`DateTime`] with [`NaiveDateTime::and_utc`] or
/// [`NaiveDateTime::and_local_timezone`].
#[deprecated(
since = "0.4.29",
note = "use `DateTime::parse_from_str` or `NaiveDateTime::parse_from_str` with `and_utc()` or `and_local_timezone()` instead"
)]
fn datetime_from_str(&self, s: &str, fmt: &str) -> ParseResult<DateTime<Self>> {
let mut parsed = Parsed::new();
parse(&mut parsed, s, StrftimeItems::new(fmt))?;
parsed.to_datetime_with_timezone(self)
}
/// Reconstructs the time zone from the offset.
fn from_offset(offset: &Self::Offset) -> Self;
/// Creates the offset(s) for given local `NaiveDate` if possible.
fn offset_from_local_date(&self, local: &NaiveDate) -> MappedLocalTime<Self::Offset>;
/// Creates the offset(s) for given local `NaiveDateTime` if possible.
fn offset_from_local_datetime(&self, local: &NaiveDateTime) -> MappedLocalTime<Self::Offset>;
/// Converts the local `NaiveDate` to the timezone-aware `Date` if possible.
#[allow(clippy::wrong_self_convention)]
#[deprecated(since = "0.4.23", note = "use `from_local_datetime()` instead")]
#[allow(deprecated)]
fn from_local_date(&self, local: &NaiveDate) -> MappedLocalTime<Date<Self>> {
self.offset_from_local_date(local).map(|offset| {
// since FixedOffset is within +/- 1 day, the date is never affected
Date::from_utc(*local, offset)
})
}
/// Converts the local `NaiveDateTime` to the timezone-aware `DateTime` if possible.
#[allow(clippy::wrong_self_convention)]
fn from_local_datetime(&self, local: &NaiveDateTime) -> MappedLocalTime<DateTime<Self>> {
self.offset_from_local_datetime(local).and_then(|off| {
local
.checked_sub_offset(off.fix())
.map(|dt| DateTime::from_naive_utc_and_offset(dt, off))
})
}
/// Creates the offset for given UTC `NaiveDate`. This cannot fail.
fn offset_from_utc_date(&self, utc: &NaiveDate) -> Self::Offset;
/// Creates the offset for given UTC `NaiveDateTime`. This cannot fail.
fn offset_from_utc_datetime(&self, utc: &NaiveDateTime) -> Self::Offset;
/// Converts the UTC `NaiveDate` to the local time.
/// UTC is continuous and thus this cannot fail (but can give a duplicate local time).
#[allow(clippy::wrong_self_convention)]
#[deprecated(since = "0.4.23", note = "use `from_utc_datetime()` instead")]
#[allow(deprecated)]
fn from_utc_date(&self, utc: &NaiveDate) -> Date<Self> {
Date::from_utc(*utc, self.offset_from_utc_date(utc))
}
/// Converts the UTC `NaiveDateTime` to the local time.
/// UTC is continuous and thus this cannot fail (but can give a duplicate local time).
#[allow(clippy::wrong_self_convention)]
fn from_utc_datetime(&self, utc: &NaiveDateTime) -> DateTime<Self> {
DateTime::from_naive_utc_and_offset(*utc, self.offset_from_utc_datetime(utc))
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_fixed_offset_min_max_dates() {
for offset_hour in -23..=23 {
dbg!(offset_hour);
let offset = FixedOffset::east_opt(offset_hour * 60 * 60).unwrap();
let local_max = offset.from_utc_datetime(&NaiveDateTime::MAX);
assert_eq!(local_max.naive_utc(), NaiveDateTime::MAX);
let local_min = offset.from_utc_datetime(&NaiveDateTime::MIN);
assert_eq!(local_min.naive_utc(), NaiveDateTime::MIN);
let local_max = offset.from_local_datetime(&NaiveDateTime::MAX);
if offset_hour >= 0 {
assert_eq!(local_max.unwrap().naive_local(), NaiveDateTime::MAX);
} else {
assert_eq!(local_max, MappedLocalTime::None);
}
let local_min = offset.from_local_datetime(&NaiveDateTime::MIN);
if offset_hour <= 0 {
assert_eq!(local_min.unwrap().naive_local(), NaiveDateTime::MIN);
} else {
assert_eq!(local_min, MappedLocalTime::None);
}
}
}
#[test]
fn test_negative_millis() {
let dt = Utc.timestamp_millis_opt(-1000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:59 UTC");
let dt = Utc.timestamp_millis_opt(-7000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:53 UTC");
let dt = Utc.timestamp_millis_opt(-7001).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:52.999 UTC");
let dt = Utc.timestamp_millis_opt(-7003).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:52.997 UTC");
let dt = Utc.timestamp_millis_opt(-999).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.001 UTC");
let dt = Utc.timestamp_millis_opt(-1).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.999 UTC");
let dt = Utc.timestamp_millis_opt(-60000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:00 UTC");
let dt = Utc.timestamp_millis_opt(-3600000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:00:00 UTC");
for (millis, expected) in &[
(-7000, "1969-12-31 23:59:53 UTC"),
(-7001, "1969-12-31 23:59:52.999 UTC"),
(-7003, "1969-12-31 23:59:52.997 UTC"),
] {
match Utc.timestamp_millis_opt(*millis) {
MappedLocalTime::Single(dt) => {
assert_eq!(dt.to_string(), *expected);
}
e => panic!("Got {:?} instead of an okay answer", e),
}
}
}
#[test]
fn test_negative_nanos() {
let dt = Utc.timestamp_nanos(-1_000_000_000);
assert_eq!(dt.to_string(), "1969-12-31 23:59:59 UTC");
let dt = Utc.timestamp_nanos(-999_999_999);
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.000000001 UTC");
let dt = Utc.timestamp_nanos(-1);
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.999999999 UTC");
let dt = Utc.timestamp_nanos(-60_000_000_000);
assert_eq!(dt.to_string(), "1969-12-31 23:59:00 UTC");
let dt = Utc.timestamp_nanos(-3_600_000_000_000);
assert_eq!(dt.to_string(), "1969-12-31 23:00:00 UTC");
}
#[test]
fn test_nanos_never_panics() {
Utc.timestamp_nanos(i64::MAX);
Utc.timestamp_nanos(i64::default());
Utc.timestamp_nanos(i64::MIN);
}
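// A small additional sketch: the timestamp from the `timestamp_opt` doc example
// above round-trips through `with_ymd_and_hms` for `Utc`.
#[test]
fn test_utc_timestamp_roundtrip_sketch() {
    let dt = Utc.with_ymd_and_hms(2015, 5, 15, 0, 0, 0).unwrap();
    assert_eq!(dt.timestamp(), 1431648000);
    assert_eq!(Utc.timestamp_opt(1431648000, 0).unwrap(), dt);
}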
#[test]
fn test_negative_micros() {
let dt = Utc.timestamp_micros(-1_000_000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:59 UTC");
let dt = Utc.timestamp_micros(-999_999).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.000001 UTC");
let dt = Utc.timestamp_micros(-1).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:59.999999 UTC");
let dt = Utc.timestamp_micros(-60_000_000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:59:00 UTC");
let dt = Utc.timestamp_micros(-3_600_000_000).unwrap();
assert_eq!(dt.to_string(), "1969-12-31 23:00:00 UTC");
}
}

View File

@@ -0,0 +1,152 @@
// This is a part of Chrono.
// See README.md and LICENSE.txt for details.
//! The UTC (Coordinated Universal Time) time zone.
use core::fmt;
#[cfg(all(
feature = "now",
not(all(
target_arch = "wasm32",
feature = "wasmbind",
not(any(target_os = "emscripten", target_os = "wasi", target_os = "linux"))
))
))]
use std::time::{SystemTime, UNIX_EPOCH};
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
use rkyv::{Archive, Deserialize, Serialize};
use super::{FixedOffset, MappedLocalTime, Offset, TimeZone};
use crate::naive::{NaiveDate, NaiveDateTime};
#[cfg(feature = "now")]
#[allow(deprecated)]
use crate::{Date, DateTime};
/// The UTC time zone. This is the most efficient time zone when you don't need the local time.
/// It is also used as an offset (which is also a dummy type).
///
/// Using the [`TimeZone`](./trait.TimeZone.html) methods
/// on the UTC struct is the preferred way to construct `DateTime<Utc>`
/// instances.
///
/// # Example
///
/// ```
/// use chrono::{DateTime, TimeZone, Utc};
///
/// let dt = DateTime::from_timestamp(61, 0).unwrap();
///
/// assert_eq!(Utc.timestamp_opt(61, 0).unwrap(), dt);
/// assert_eq!(Utc.with_ymd_and_hms(1970, 1, 1, 0, 1, 1).unwrap(), dt);
/// ```
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(
any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, Debug, Hash)))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
pub struct Utc;
#[cfg(feature = "now")]
impl Utc {
/// Returns a `Date` which corresponds to the current date.
#[deprecated(
since = "0.4.23",
note = "use `Utc::now()` instead, potentially with `.date_naive()`"
)]
#[allow(deprecated)]
#[must_use]
pub fn today() -> Date<Utc> {
Utc::now().date()
}
/// Returns a `DateTime<Utc>` which corresponds to the current date and time in UTC.
///
/// See also the similar [`Local::now()`] which returns `DateTime<Local>`, i.e. the local date
/// and time including offset from UTC.
///
/// [`Local::now()`]: crate::Local::now
///
/// # Example
///
/// ```
/// # #![allow(unused_variables)]
/// # use chrono::{FixedOffset, Utc};
/// // Current time in UTC
/// let now_utc = Utc::now();
///
/// // Current date in UTC
/// let today_utc = now_utc.date_naive();
///
/// // Current time in some timezone (let's use +05:00)
/// let offset = FixedOffset::east_opt(5 * 60 * 60).unwrap();
/// let now_with_offset = Utc::now().with_timezone(&offset);
/// ```
#[cfg(not(all(
target_arch = "wasm32",
feature = "wasmbind",
not(any(target_os = "emscripten", target_os = "wasi", target_os = "linux"))
)))]
#[must_use]
pub fn now() -> DateTime<Utc> {
let now =
SystemTime::now().duration_since(UNIX_EPOCH).expect("system time before Unix epoch");
DateTime::from_timestamp(now.as_secs() as i64, now.subsec_nanos()).unwrap()
}
/// Returns a `DateTime` which corresponds to the current date and time.
#[cfg(all(
target_arch = "wasm32",
feature = "wasmbind",
not(any(target_os = "emscripten", target_os = "wasi", target_os = "linux"))
))]
#[must_use]
pub fn now() -> DateTime<Utc> {
let now = js_sys::Date::new_0();
DateTime::<Utc>::from(now)
}
}
impl TimeZone for Utc {
type Offset = Utc;
fn from_offset(_state: &Utc) -> Utc {
Utc
}
fn offset_from_local_date(&self, _local: &NaiveDate) -> MappedLocalTime<Utc> {
MappedLocalTime::Single(Utc)
}
fn offset_from_local_datetime(&self, _local: &NaiveDateTime) -> MappedLocalTime<Utc> {
MappedLocalTime::Single(Utc)
}
fn offset_from_utc_date(&self, _utc: &NaiveDate) -> Utc {
Utc
}
fn offset_from_utc_datetime(&self, _utc: &NaiveDateTime) -> Utc {
Utc
}
}
impl Offset for Utc {
fn fix(&self) -> FixedOffset {
FixedOffset::east_opt(0).unwrap()
}
}
impl fmt::Debug for Utc {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Z")
}
}
impl fmt::Display for Utc {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "UTC")
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,415 @@
use crate::{IsoWeek, Month, Weekday};
/// The common set of methods for date component.
///
/// Methods such as [`year`], [`month`], [`day`] and [`weekday`] can be used to get basic
/// information about the date.
///
/// The `with_*` methods can change the date.
///
/// # Warning
///
/// The `with_*` methods can be convenient to change a single component of a date, but they must be
/// used with some care. Examples to watch out for:
///
/// - [`with_year`] changes the year component of a year-month-day value. Don't use this method if
/// you want the ordinal to stay the same after changing the year, or if you want the week and
/// weekday values to stay the same.
/// - Don't combine two `with_*` methods to change two components of the date. For example to
/// change both the year and month components of a date. This could fail because an intermediate
/// value does not exist, while the final date would be valid.
///
/// For more complex changes to a date, it is best to use the methods on [`NaiveDate`] to create a
/// new value instead of altering an existing date.
///
/// [`year`]: Datelike::year
/// [`month`]: Datelike::month
/// [`day`]: Datelike::day
/// [`weekday`]: Datelike::weekday
/// [`with_year`]: Datelike::with_year
/// [`NaiveDate`]: crate::NaiveDate
pub trait Datelike: Sized {
/// Returns the year number in the [calendar date](./naive/struct.NaiveDate.html#calendar-date).
fn year(&self) -> i32;
/// Returns the absolute year number starting from 1 with a boolean flag,
/// which is false when the year predates the epoch (BCE/BC) and true otherwise (CE/AD).
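///
/// # Example
///
/// A small illustration using `NaiveDate` (one `Datelike` implementor):
///
/// ```
/// use chrono::{Datelike, NaiveDate};
///
/// assert_eq!(NaiveDate::from_ymd_opt(2024, 5, 13).unwrap().year_ce(), (true, 2024));
/// // Year -4 in the proleptic Gregorian calendar is 5 BCE.
/// assert_eq!(NaiveDate::from_ymd_opt(-4, 1, 1).unwrap().year_ce(), (false, 5));
/// ```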
#[inline]
fn year_ce(&self) -> (bool, u32) {
let year = self.year();
if year < 1 { (false, (1 - year) as u32) } else { (true, year as u32) }
}
/// Returns the quarter number starting from 1.
///
/// The return value ranges from 1 to 4.
#[inline]
fn quarter(&self) -> u32 {
(self.month() - 1).div_euclid(3) + 1
}
/// Returns the month number starting from 1.
///
/// The return value ranges from 1 to 12.
fn month(&self) -> u32;
/// Returns the month number starting from 0.
///
/// The return value ranges from 0 to 11.
fn month0(&self) -> u32;
/// Returns the day of month starting from 1.
///
/// The return value ranges from 1 to 31. (The last day of month differs by months.)
fn day(&self) -> u32;
/// Returns the day of month starting from 0.
///
/// The return value ranges from 0 to 30. (The last day of month differs by months.)
fn day0(&self) -> u32;
/// Returns the day of year starting from 1.
///
/// The return value ranges from 1 to 366. (The last day of year differs by years.)
fn ordinal(&self) -> u32;
/// Returns the day of year starting from 0.
///
/// The return value ranges from 0 to 365. (The last day of year differs by years.)
fn ordinal0(&self) -> u32;
/// Returns the day of week.
fn weekday(&self) -> Weekday;
/// Returns the ISO week.
fn iso_week(&self) -> IsoWeek;
/// Makes a new value with the year number changed, while keeping the same month and day.
///
/// This method assumes you want to work on the date as a year-month-day value. Don't use it if
/// you want the ordinal to stay the same after changing the year, or if you want the week and
/// weekday values to stay the same.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (February 29 in a non-leap year).
/// - The year is out of range for [`NaiveDate`].
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
///
/// [`NaiveDate`]: crate::NaiveDate
/// [`DateTime<Tz>`]: crate::DateTime
///
/// # Examples
///
/// ```
/// use chrono::{Datelike, NaiveDate};
///
/// assert_eq!(
/// NaiveDate::from_ymd_opt(2020, 5, 13).unwrap().with_year(2023).unwrap(),
/// NaiveDate::from_ymd_opt(2023, 5, 13).unwrap()
/// );
/// // Resulting date 2023-02-29 does not exist:
/// assert!(NaiveDate::from_ymd_opt(2020, 2, 29).unwrap().with_year(2023).is_none());
///
/// // Don't use `with_year` if you want the ordinal date to stay the same:
/// assert_ne!(
/// NaiveDate::from_yo_opt(2020, 100).unwrap().with_year(2023).unwrap(),
/// NaiveDate::from_yo_opt(2023, 100).unwrap() // result is 2023-101
/// );
/// ```
fn with_year(&self, year: i32) -> Option<Self>;
/// Makes a new value with the month number (starting from 1) changed.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (for example `month(4)` when day of the month is 31).
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
/// - The value for `month` is out of range.
///
/// [`DateTime<Tz>`]: crate::DateTime
///
/// # Examples
///
/// ```
/// use chrono::{Datelike, NaiveDate};
///
/// assert_eq!(
/// NaiveDate::from_ymd_opt(2023, 5, 12).unwrap().with_month(9).unwrap(),
/// NaiveDate::from_ymd_opt(2023, 9, 12).unwrap()
/// );
/// // Resulting date 2023-09-31 does not exist:
/// assert!(NaiveDate::from_ymd_opt(2023, 5, 31).unwrap().with_month(9).is_none());
/// ```
///
/// Don't combine multiple `Datelike::with_*` methods. The intermediate value may not exist.
/// ```
/// use chrono::{Datelike, NaiveDate};
///
/// fn with_year_month(date: NaiveDate, year: i32, month: u32) -> Option<NaiveDate> {
/// date.with_year(year)?.with_month(month)
/// }
/// let d = NaiveDate::from_ymd_opt(2020, 2, 29).unwrap();
/// assert!(with_year_month(d, 2019, 1).is_none()); // fails because of invalid intermediate value
///
/// // Correct version:
/// fn with_year_month_fixed(date: NaiveDate, year: i32, month: u32) -> Option<NaiveDate> {
/// NaiveDate::from_ymd_opt(year, month, date.day())
/// }
/// let d = NaiveDate::from_ymd_opt(2020, 2, 29).unwrap();
/// assert_eq!(with_year_month_fixed(d, 2019, 1), NaiveDate::from_ymd_opt(2019, 1, 29));
/// ```
fn with_month(&self, month: u32) -> Option<Self>;
/// Makes a new value with the month number (starting from 0) changed.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (for example `month0(3)` when day of the month is 31).
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
/// - The value for `month0` is out of range.
///
/// [`DateTime<Tz>`]: crate::DateTime
fn with_month0(&self, month0: u32) -> Option<Self>;
/// Makes a new value with the day of month (starting from 1) changed.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (for example `day(31)` in April).
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
/// - The value for `day` is out of range.
///
/// [`DateTime<Tz>`]: crate::DateTime
fn with_day(&self, day: u32) -> Option<Self>;
/// Makes a new value with the day of month (starting from 0) changed.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (for example `day0(30)` in April).
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
/// - The value for `day0` is out of range.
///
/// [`DateTime<Tz>`]: crate::DateTime
fn with_day0(&self, day0: u32) -> Option<Self>;
/// Makes a new value with the day of year (starting from 1) changed.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (`with_ordinal(366)` in a non-leap year).
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
/// - The value for `ordinal` is out of range.
///
/// [`DateTime<Tz>`]: crate::DateTime
fn with_ordinal(&self, ordinal: u32) -> Option<Self>;
/// Makes a new value with the day of year (starting from 0) changed.
///
/// # Errors
///
/// Returns `None` when:
///
/// - The resulting date does not exist (`with_ordinal0(365)` in a non-leap year).
/// - In case of [`DateTime<Tz>`] if the resulting date and time fall within a timezone
/// transition such as from DST to standard time.
/// - The value for `ordinal0` is out of range.
///
/// [`DateTime<Tz>`]: crate::DateTime
fn with_ordinal0(&self, ordinal0: u32) -> Option<Self>;
/// Counts the days in the proleptic Gregorian calendar, with January 1, Year 1 (CE) as day 1.
///
/// # Examples
///
/// ```
/// use chrono::{Datelike, NaiveDate};
///
/// assert_eq!(NaiveDate::from_ymd_opt(1970, 1, 1).unwrap().num_days_from_ce(), 719_163);
/// assert_eq!(NaiveDate::from_ymd_opt(2, 1, 1).unwrap().num_days_from_ce(), 366);
/// assert_eq!(NaiveDate::from_ymd_opt(1, 1, 1).unwrap().num_days_from_ce(), 1);
/// assert_eq!(NaiveDate::from_ymd_opt(0, 1, 1).unwrap().num_days_from_ce(), -365);
/// ```
fn num_days_from_ce(&self) -> i32 {
// See test_num_days_from_ce_against_alternative_impl below for a more straightforward
// implementation.
// we know this wouldn't overflow since year is limited to 1/2^13 of i32's full range.
let mut year = self.year() - 1;
let mut ndays = 0;
if year < 0 {
let excess = 1 + (-year) / 400;
year += excess * 400;
ndays -= excess * 146_097;
}
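// 146_097 above is the number of days in one 400-year Gregorian cycle; 1461 = 4 * 365 + 1,
// so `(year * 1461) >> 2` counts 365.25 days per year, and the `div_100` terms subtract the
// skipped century leap days while adding back every 400th year.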
let div_100 = year / 100;
ndays += ((year * 1461) >> 2) - div_100 + (div_100 >> 2);
ndays + self.ordinal() as i32
}
/// Get the length in days of the month
fn num_days_in_month(&self) -> u8 {
use num_traits::FromPrimitive;
// The value returned from `self.month()` is guaranteed to be in the
// range [1,12], which will never result in a `None` value here.
let month = Month::from_u32(self.month()).unwrap();
// `Month::num_days` will only return `None` if the provided year is out
// of range. Since we are passing it directly from a verified date, we
// know it is in range, and the result will never be `None`.
month.num_days(self.year()).unwrap()
}
}
/// The common set of methods for time component.
pub trait Timelike: Sized {
/// Returns the hour number from 0 to 23.
fn hour(&self) -> u32;
/// Returns the hour number from 1 to 12 with a boolean flag,
/// which is false for AM and true for PM.
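///
/// # Example
///
/// A small illustration using `NaiveTime` (one `Timelike` implementor):
///
/// ```
/// use chrono::{NaiveTime, Timelike};
///
/// assert_eq!(NaiveTime::from_hms_opt(0, 15, 0).unwrap().hour12(), (false, 12));
/// assert_eq!(NaiveTime::from_hms_opt(13, 15, 0).unwrap().hour12(), (true, 1));
/// ```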
#[inline]
fn hour12(&self) -> (bool, u32) {
let hour = self.hour();
let mut hour12 = hour % 12;
if hour12 == 0 {
hour12 = 12;
}
(hour >= 12, hour12)
}
/// Returns the minute number from 0 to 59.
fn minute(&self) -> u32;
/// Returns the second number from 0 to 59.
fn second(&self) -> u32;
/// Returns the number of nanoseconds since the whole non-leap second.
/// The range from 1,000,000,000 to 1,999,999,999 represents
/// the [leap second](./naive/struct.NaiveTime.html#leap-second-handling).
fn nanosecond(&self) -> u32;
/// Makes a new value with the hour number changed.
///
/// Returns `None` when the resulting value would be invalid.
fn with_hour(&self, hour: u32) -> Option<Self>;
/// Makes a new value with the minute number changed.
///
/// Returns `None` when the resulting value would be invalid.
fn with_minute(&self, min: u32) -> Option<Self>;
/// Makes a new value with the second number changed.
///
/// Returns `None` when the resulting value would be invalid.
/// As with the [`second`](#tymethod.second) method,
/// the input range is restricted to 0 through 59.
fn with_second(&self, sec: u32) -> Option<Self>;
/// Makes a new value with nanoseconds since the whole non-leap second changed.
///
/// Returns `None` when the resulting value would be invalid.
/// As with the [`nanosecond`](#tymethod.nanosecond) method,
/// the input range can exceed 1,000,000,000 for leap seconds.
fn with_nanosecond(&self, nano: u32) -> Option<Self>;
/// Returns the number of non-leap seconds past the last midnight.
///
/// Every value in 00:00:00-23:59:59 maps to an integer in 0-86399.
///
/// This method is not intended to provide the real number of seconds since midnight on a given
/// day. It does not take things like DST transitions into account.
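///
/// # Example
///
/// For instance, via `NaiveTime`:
///
/// ```
/// use chrono::{NaiveTime, Timelike};
///
/// assert_eq!(NaiveTime::from_hms_opt(23, 59, 59).unwrap().num_seconds_from_midnight(), 86_399);
/// ```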
#[inline]
fn num_seconds_from_midnight(&self) -> u32 {
self.hour() * 3600 + self.minute() * 60 + self.second()
}
}
#[cfg(test)]
mod tests {
use super::Datelike;
use crate::{Days, NaiveDate};
/// Tests `Datelike::num_days_from_ce` against an alternative implementation.
///
/// The alternative implementation is not as short as the current one but it is simpler to
/// understand, with less unexplained magic constants.
#[test]
fn test_num_days_from_ce_against_alternative_impl() {
/// Returns the number of multiples of `div` in the range `start..end`.
///
/// If the range `start..end` is back-to-front, i.e. `start` is greater than `end`, the
/// behaviour is defined by the following equation:
/// `in_between(start, end, div) == - in_between(end, start, div)`.
///
/// When `div` is 1, this is equivalent to `end - start`, i.e. the length of `start..end`.
///
/// # Panics
///
/// Panics if `div` is not positive.
fn in_between(start: i32, end: i32, div: i32) -> i32 {
assert!(div > 0, "in_between: nonpositive div = {}", div);
let start = (start.div_euclid(div), start.rem_euclid(div));
let end = (end.div_euclid(div), end.rem_euclid(div));
// The lowest multiple of `div` greater than or equal to `start`, divided.
let start = start.0 + (start.1 != 0) as i32;
// The lowest multiple of `div` greater than or equal to `end`, divided.
let end = end.0 + (end.1 != 0) as i32;
end - start
}
/// Alternative implementation to `Datelike::num_days_from_ce`
fn num_days_from_ce<Date: Datelike>(date: &Date) -> i32 {
let year = date.year();
let diff = move |div| in_between(1, year, div);
// 365 days a year, one more in leap years. In the gregorian calendar, leap years are all
// the multiples of 4 except multiples of 100 but including multiples of 400.
date.ordinal() as i32 + 365 * diff(1) + diff(4) - diff(100) + diff(400)
}
for year in NaiveDate::MIN.year()..=NaiveDate::MAX.year() {
let jan1_year = NaiveDate::from_ymd_opt(year, 1, 1).unwrap();
assert_eq!(
jan1_year.num_days_from_ce(),
num_days_from_ce(&jan1_year),
"on {:?}",
jan1_year
);
let mid_year = jan1_year + Days::new(133);
assert_eq!(
mid_year.num_days_from_ce(),
num_days_from_ce(&mid_year),
"on {:?}",
mid_year
);
}
}
#[test]
fn test_num_days_in_month() {
let feb_leap_year = NaiveDate::from_ymd_opt(2004, 2, 1).unwrap();
assert_eq!(feb_leap_year.num_days_in_month(), 29);
let feb = feb_leap_year.with_year(2005).unwrap();
assert_eq!(feb.num_days_in_month(), 28);
let march = feb.with_month(3).unwrap();
assert_eq!(march.num_days_in_month(), 31);
}
}

View File

@@ -0,0 +1,408 @@
use core::fmt;
#[cfg(any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"))]
use rkyv::{Archive, Deserialize, Serialize};
use crate::OutOfRange;
/// The day of week.
///
/// The order of the days of week depends on the context.
/// (This is why this type does *not* implement `PartialOrd` or `Ord` traits.)
/// One should prefer `*_from_monday` or `*_from_sunday` methods to get the correct result.
///
/// # Example
/// ```
/// use chrono::Weekday;
///
/// let monday = "Monday".parse::<Weekday>().unwrap();
/// assert_eq!(monday, Weekday::Mon);
///
/// let sunday = Weekday::try_from(6).unwrap();
/// assert_eq!(sunday, Weekday::Sun);
///
/// assert_eq!(sunday.num_days_from_monday(), 6); // starts counting with Monday = 0
/// assert_eq!(sunday.number_from_monday(), 7); // starts counting with Monday = 1
/// assert_eq!(sunday.num_days_from_sunday(), 0); // starts counting with Sunday = 0
/// assert_eq!(sunday.number_from_sunday(), 1); // starts counting with Sunday = 1
///
/// assert_eq!(sunday.succ(), monday);
/// assert_eq!(sunday.pred(), Weekday::Sat);
/// ```
#[derive(PartialEq, Eq, Copy, Clone, Debug, Hash)]
#[cfg_attr(
any(feature = "rkyv-16", feature = "rkyv-32", feature = "rkyv-64"),
derive(Archive, Deserialize, Serialize),
rkyv(compare(PartialEq)),
rkyv(attr(derive(Clone, Copy, PartialEq, Eq, Debug, Hash)))
)]
#[cfg_attr(feature = "rkyv-validation", archive(check_bytes))]
#[cfg_attr(all(feature = "arbitrary", feature = "std"), derive(arbitrary::Arbitrary))]
pub enum Weekday {
/// Monday.
Mon = 0,
/// Tuesday.
Tue = 1,
/// Wednesday.
Wed = 2,
/// Thursday.
Thu = 3,
/// Friday.
Fri = 4,
/// Saturday.
Sat = 5,
/// Sunday.
Sun = 6,
}
impl Weekday {
/// The next day in the week.
///
/// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`
/// ----------- | ----- | ----- | ----- | ----- | ----- | ----- | -----
/// `w.succ()`: | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun` | `Mon`
#[inline]
#[must_use]
pub const fn succ(&self) -> Weekday {
match *self {
Weekday::Mon => Weekday::Tue,
Weekday::Tue => Weekday::Wed,
Weekday::Wed => Weekday::Thu,
Weekday::Thu => Weekday::Fri,
Weekday::Fri => Weekday::Sat,
Weekday::Sat => Weekday::Sun,
Weekday::Sun => Weekday::Mon,
}
}
/// The previous day in the week.
///
/// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`
/// ----------- | ----- | ----- | ----- | ----- | ----- | ----- | -----
/// `w.pred()`: | `Sun` | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat`
#[inline]
#[must_use]
pub const fn pred(&self) -> Weekday {
match *self {
Weekday::Mon => Weekday::Sun,
Weekday::Tue => Weekday::Mon,
Weekday::Wed => Weekday::Tue,
Weekday::Thu => Weekday::Wed,
Weekday::Fri => Weekday::Thu,
Weekday::Sat => Weekday::Fri,
Weekday::Sun => Weekday::Sat,
}
}
/// Returns a day-of-week number starting from Monday = 1. (ISO 8601 weekday number)
///
/// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`
/// ------------------------- | ----- | ----- | ----- | ----- | ----- | ----- | -----
/// `w.number_from_monday()`: | 1 | 2 | 3 | 4 | 5 | 6 | 7
#[inline]
pub const fn number_from_monday(&self) -> u32 {
self.days_since(Weekday::Mon) + 1
}
/// Returns a day-of-week number starting from Sunday = 1.
///
/// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`
/// ------------------------- | ----- | ----- | ----- | ----- | ----- | ----- | -----
/// `w.number_from_sunday()`: | 2 | 3 | 4 | 5 | 6 | 7 | 1
#[inline]
pub const fn number_from_sunday(&self) -> u32 {
self.days_since(Weekday::Sun) + 1
}
/// Returns a day-of-week number starting from Monday = 0.
///
/// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`
/// --------------------------- | ----- | ----- | ----- | ----- | ----- | ----- | -----
/// `w.num_days_from_monday()`: | 0 | 1 | 2 | 3 | 4 | 5 | 6
///
/// # Example
///
/// ```
/// # #[cfg(feature = "clock")] {
/// # use chrono::{Local, Datelike};
/// // MTWRFSU is occasionally used as a single-letter abbreviation of the weekdays.
/// // Use `num_days_from_monday` to index into the array.
/// const MTWRFSU: [char; 7] = ['M', 'T', 'W', 'R', 'F', 'S', 'U'];
///
/// let today = Local::now().weekday();
/// println!("{}", MTWRFSU[today.num_days_from_monday() as usize]);
/// # }
/// ```
#[inline]
pub const fn num_days_from_monday(&self) -> u32 {
self.days_since(Weekday::Mon)
}
/// Returns a day-of-week number starting from Sunday = 0.
///
/// `w`: | `Mon` | `Tue` | `Wed` | `Thu` | `Fri` | `Sat` | `Sun`
/// --------------------------- | ----- | ----- | ----- | ----- | ----- | ----- | -----
/// `w.num_days_from_sunday()`: | 1 | 2 | 3 | 4 | 5 | 6 | 0
#[inline]
pub const fn num_days_from_sunday(&self) -> u32 {
self.days_since(Weekday::Sun)
}
/// The number of days since the given day.
///
/// # Examples
///
/// ```
/// use chrono::Weekday::*;
/// assert_eq!(Mon.days_since(Mon), 0);
/// assert_eq!(Sun.days_since(Tue), 5);
/// assert_eq!(Wed.days_since(Sun), 3);
/// ```
pub const fn days_since(&self, other: Weekday) -> u32 {
let lhs = *self as u32;
let rhs = other as u32;
if lhs < rhs { 7 + lhs - rhs } else { lhs - rhs }
}
}
impl fmt::Display for Weekday {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad(match *self {
Weekday::Mon => "Mon",
Weekday::Tue => "Tue",
Weekday::Wed => "Wed",
Weekday::Thu => "Thu",
Weekday::Fri => "Fri",
Weekday::Sat => "Sat",
Weekday::Sun => "Sun",
})
}
}
/// Any weekday can be represented as an integer from 0 to 6, which is equal to
/// [`Weekday::num_days_from_monday`](#method.num_days_from_monday) in this implementation.
/// Do not heavily depend on this though; use explicit methods whenever possible.
impl TryFrom<u8> for Weekday {
type Error = OutOfRange;
fn try_from(value: u8) -> Result<Self, Self::Error> {
match value {
0 => Ok(Weekday::Mon),
1 => Ok(Weekday::Tue),
2 => Ok(Weekday::Wed),
3 => Ok(Weekday::Thu),
4 => Ok(Weekday::Fri),
5 => Ok(Weekday::Sat),
6 => Ok(Weekday::Sun),
_ => Err(OutOfRange::new()),
}
}
}
/// Any weekday can be represented as an integer from 0 to 6, which is equal to
/// [`Weekday::num_days_from_monday`](#method.num_days_from_monday) in this implementation.
/// Do not heavily depend on this though; use explicit methods whenever possible.
impl num_traits::FromPrimitive for Weekday {
#[inline]
fn from_i64(n: i64) -> Option<Weekday> {
match n {
0 => Some(Weekday::Mon),
1 => Some(Weekday::Tue),
2 => Some(Weekday::Wed),
3 => Some(Weekday::Thu),
4 => Some(Weekday::Fri),
5 => Some(Weekday::Sat),
6 => Some(Weekday::Sun),
_ => None,
}
}
#[inline]
fn from_u64(n: u64) -> Option<Weekday> {
match n {
0 => Some(Weekday::Mon),
1 => Some(Weekday::Tue),
2 => Some(Weekday::Wed),
3 => Some(Weekday::Thu),
4 => Some(Weekday::Fri),
5 => Some(Weekday::Sat),
6 => Some(Weekday::Sun),
_ => None,
}
}
}
/// An error resulting from reading a `Weekday` value with `FromStr`.
#[derive(Clone, PartialEq, Eq)]
pub struct ParseWeekdayError {
pub(crate) _dummy: (),
}
#[cfg(feature = "std")]
impl std::error::Error for ParseWeekdayError {}
impl fmt::Display for ParseWeekdayError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_fmt(format_args!("{self:?}"))
}
}
impl fmt::Debug for ParseWeekdayError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "ParseWeekdayError {{ .. }}")
}
}
// the actual `FromStr` implementation is in the `format` module to leverage the existing code
#[cfg(feature = "serde")]
mod weekday_serde {
use super::Weekday;
use core::fmt;
use serde::{de, ser};
impl ser::Serialize for Weekday {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: ser::Serializer,
{
serializer.collect_str(&self)
}
}
struct WeekdayVisitor;
impl de::Visitor<'_> for WeekdayVisitor {
type Value = Weekday;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("Weekday")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: de::Error,
{
value.parse().map_err(|_| E::custom("short or long weekday names expected"))
}
}
impl<'de> de::Deserialize<'de> for Weekday {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: de::Deserializer<'de>,
{
deserializer.deserialize_str(WeekdayVisitor)
}
}
}
#[cfg(test)]
mod tests {
use super::Weekday;
#[test]
fn test_days_since() {
for i in 0..7 {
let base_day = Weekday::try_from(i).unwrap();
assert_eq!(base_day.num_days_from_monday(), base_day.days_since(Weekday::Mon));
assert_eq!(base_day.num_days_from_sunday(), base_day.days_since(Weekday::Sun));
assert_eq!(base_day.days_since(base_day), 0);
assert_eq!(base_day.days_since(base_day.pred()), 1);
assert_eq!(base_day.days_since(base_day.pred().pred()), 2);
assert_eq!(base_day.days_since(base_day.pred().pred().pred()), 3);
assert_eq!(base_day.days_since(base_day.pred().pred().pred().pred()), 4);
assert_eq!(base_day.days_since(base_day.pred().pred().pred().pred().pred()), 5);
assert_eq!(base_day.days_since(base_day.pred().pred().pred().pred().pred().pred()), 6);
assert_eq!(base_day.days_since(base_day.succ()), 6);
assert_eq!(base_day.days_since(base_day.succ().succ()), 5);
assert_eq!(base_day.days_since(base_day.succ().succ().succ()), 4);
assert_eq!(base_day.days_since(base_day.succ().succ().succ().succ()), 3);
assert_eq!(base_day.days_since(base_day.succ().succ().succ().succ().succ()), 2);
assert_eq!(base_day.days_since(base_day.succ().succ().succ().succ().succ().succ()), 1);
}
}
#[test]
fn test_formatting_alignment() {
// No exhaustive testing here as we just delegate the
// implementation to Formatter::pad. Just some basic smoke
// testing to ensure that it's in fact being done.
assert_eq!(format!("{:x>7}", Weekday::Mon), "xxxxMon");
assert_eq!(format!("{:^7}", Weekday::Mon), " Mon ");
assert_eq!(format!("{:Z<7}", Weekday::Mon), "MonZZZZ");
}
#[test]
#[cfg(feature = "serde")]
fn test_serde_serialize() {
use Weekday::*;
use serde_json::to_string;
let cases: Vec<(Weekday, &str)> = vec![
(Mon, "\"Mon\""),
(Tue, "\"Tue\""),
(Wed, "\"Wed\""),
(Thu, "\"Thu\""),
(Fri, "\"Fri\""),
(Sat, "\"Sat\""),
(Sun, "\"Sun\""),
];
for (weekday, expected_str) in cases {
let string = to_string(&weekday).unwrap();
assert_eq!(string, expected_str);
}
}
#[test]
#[cfg(feature = "serde")]
fn test_serde_deserialize() {
use Weekday::*;
use serde_json::from_str;
let cases: Vec<(&str, Weekday)> = vec![
("\"mon\"", Mon),
("\"MONDAY\"", Mon),
("\"MonDay\"", Mon),
("\"mOn\"", Mon),
("\"tue\"", Tue),
("\"tuesday\"", Tue),
("\"wed\"", Wed),
("\"wednesday\"", Wed),
("\"thu\"", Thu),
("\"thursday\"", Thu),
("\"fri\"", Fri),
("\"friday\"", Fri),
("\"sat\"", Sat),
("\"saturday\"", Sat),
("\"sun\"", Sun),
("\"sunday\"", Sun),
];
for (str, expected_weekday) in cases {
let weekday = from_str::<Weekday>(str).unwrap();
assert_eq!(weekday, expected_weekday);
}
let errors: Vec<&str> =
vec!["\"not a weekday\"", "\"monDAYs\"", "\"mond\"", "mon", "\"thur\"", "\"thurs\""];
for str in errors {
from_str::<Weekday>(str).unwrap_err();
}
}
#[test]
#[cfg(feature = "rkyv-validation")]
fn test_rkyv_validation() {
let mon = Weekday::Mon;
let bytes = rkyv::to_bytes::<_, 1>(&mon).unwrap();
assert_eq!(rkyv::from_bytes::<Weekday>(&bytes).unwrap(), mon);
}
}

View File

@@ -0,0 +1,483 @@
use core::{
fmt::{self, Debug},
iter::FusedIterator,
};
use crate::Weekday;
/// A collection of [`Weekday`]s stored as a single byte.
///
/// This type is `Copy` and provides efficient set-like and slice-like operations.
/// Many operations are `const` as well.
///
/// Implemented as a bitmask where bits 1-7 correspond to Monday-Sunday.
#[derive(Clone, Copy, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct WeekdaySet(u8); // Invariant: the 8-th bit is always 0.
impl WeekdaySet {
/// Create a `WeekdaySet` from an array of [`Weekday`]s.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::EMPTY, WeekdaySet::from_array([]));
/// assert_eq!(WeekdaySet::single(Mon), WeekdaySet::from_array([Mon]));
/// assert_eq!(WeekdaySet::ALL, WeekdaySet::from_array([Mon, Tue, Wed, Thu, Fri, Sat, Sun]));
/// ```
pub const fn from_array<const C: usize>(days: [Weekday; C]) -> Self {
let mut acc = Self::EMPTY;
let mut idx = 0;
while idx < days.len() {
acc.0 |= Self::single(days[idx]).0;
idx += 1;
}
acc
}
/// Create a `WeekdaySet` from a single [`Weekday`].
pub const fn single(weekday: Weekday) -> Self {
match weekday {
Weekday::Mon => Self(0b000_0001),
Weekday::Tue => Self(0b000_0010),
Weekday::Wed => Self(0b000_0100),
Weekday::Thu => Self(0b000_1000),
Weekday::Fri => Self(0b001_0000),
Weekday::Sat => Self(0b010_0000),
Weekday::Sun => Self(0b100_0000),
}
}
/// Returns `Some(day)` if this collection contains exactly one day.
///
/// Returns `None` otherwise.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).single_day(), Some(Mon));
/// assert_eq!(WeekdaySet::from_array([Mon, Tue]).single_day(), None);
/// assert_eq!(WeekdaySet::EMPTY.single_day(), None);
/// assert_eq!(WeekdaySet::ALL.single_day(), None);
/// ```
pub const fn single_day(self) -> Option<Weekday> {
match self {
Self(0b000_0001) => Some(Weekday::Mon),
Self(0b000_0010) => Some(Weekday::Tue),
Self(0b000_0100) => Some(Weekday::Wed),
Self(0b000_1000) => Some(Weekday::Thu),
Self(0b001_0000) => Some(Weekday::Fri),
Self(0b010_0000) => Some(Weekday::Sat),
Self(0b100_0000) => Some(Weekday::Sun),
_ => None,
}
}
/// Adds a day to the collection.
///
/// Returns `true` if the day was new to the collection.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// let mut weekdays = WeekdaySet::single(Mon);
/// assert!(weekdays.insert(Tue));
/// assert!(!weekdays.insert(Tue));
/// ```
pub fn insert(&mut self, day: Weekday) -> bool {
if self.contains(day) {
return false;
}
self.0 |= Self::single(day).0;
true
}
/// Removes a day from the collection.
///
/// Returns `true` if the collection did contain the day.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// let mut weekdays = WeekdaySet::single(Mon);
/// assert!(weekdays.remove(Mon));
/// assert!(!weekdays.remove(Mon));
/// ```
pub fn remove(&mut self, day: Weekday) -> bool {
if self.contains(day) {
self.0 &= !Self::single(day).0;
return true;
}
false
}
/// Returns `true` if `other` contains all days in `self`.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert!(WeekdaySet::single(Mon).is_subset(WeekdaySet::ALL));
/// assert!(!WeekdaySet::single(Mon).is_subset(WeekdaySet::EMPTY));
/// assert!(WeekdaySet::EMPTY.is_subset(WeekdaySet::single(Mon)));
/// ```
pub const fn is_subset(self, other: Self) -> bool {
self.intersection(other).0 == self.0
}
/// Returns days that are in both `self` and `other`.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).intersection(WeekdaySet::single(Mon)), WeekdaySet::single(Mon));
/// assert_eq!(WeekdaySet::single(Mon).intersection(WeekdaySet::single(Tue)), WeekdaySet::EMPTY);
/// assert_eq!(WeekdaySet::ALL.intersection(WeekdaySet::single(Mon)), WeekdaySet::single(Mon));
/// assert_eq!(WeekdaySet::ALL.intersection(WeekdaySet::EMPTY), WeekdaySet::EMPTY);
/// ```
pub const fn intersection(self, other: Self) -> Self {
Self(self.0 & other.0)
}
/// Returns days that are in either `self` or `other`.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).union(WeekdaySet::single(Mon)), WeekdaySet::single(Mon));
/// assert_eq!(WeekdaySet::single(Mon).union(WeekdaySet::single(Tue)), WeekdaySet::from_array([Mon, Tue]));
/// assert_eq!(WeekdaySet::ALL.union(WeekdaySet::single(Mon)), WeekdaySet::ALL);
/// assert_eq!(WeekdaySet::ALL.union(WeekdaySet::EMPTY), WeekdaySet::ALL);
/// ```
pub const fn union(self, other: Self) -> Self {
Self(self.0 | other.0)
}
/// Returns days that are in `self` or `other` but not in both.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).symmetric_difference(WeekdaySet::single(Mon)), WeekdaySet::EMPTY);
/// assert_eq!(WeekdaySet::single(Mon).symmetric_difference(WeekdaySet::single(Tue)), WeekdaySet::from_array([Mon, Tue]));
/// assert_eq!(
/// WeekdaySet::ALL.symmetric_difference(WeekdaySet::single(Mon)),
/// WeekdaySet::from_array([Tue, Wed, Thu, Fri, Sat, Sun]),
/// );
/// assert_eq!(WeekdaySet::ALL.symmetric_difference(WeekdaySet::EMPTY), WeekdaySet::ALL);
/// ```
pub const fn symmetric_difference(self, other: Self) -> Self {
Self(self.0 ^ other.0)
}
/// Returns days that are in `self` but not in `other`.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).difference(WeekdaySet::single(Mon)), WeekdaySet::EMPTY);
/// assert_eq!(WeekdaySet::single(Mon).difference(WeekdaySet::single(Tue)), WeekdaySet::single(Mon));
/// assert_eq!(WeekdaySet::EMPTY.difference(WeekdaySet::single(Mon)), WeekdaySet::EMPTY);
/// ```
pub const fn difference(self, other: Self) -> Self {
Self(self.0 & !other.0)
}
/// Get the first day in the collection, starting from Monday.
///
/// Returns `None` if the collection is empty.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).first(), Some(Mon));
/// assert_eq!(WeekdaySet::single(Tue).first(), Some(Tue));
/// assert_eq!(WeekdaySet::ALL.first(), Some(Mon));
/// assert_eq!(WeekdaySet::EMPTY.first(), None);
/// ```
pub const fn first(self) -> Option<Weekday> {
if self.is_empty() {
return None;
}
// Find the first non-zero bit.
let bit = 1 << self.0.trailing_zeros();
Self(bit).single_day()
}
/// Get the last day in the collection, starting from Sunday.
///
/// Returns `None` if the collection is empty.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).last(), Some(Mon));
/// assert_eq!(WeekdaySet::single(Sun).last(), Some(Sun));
/// assert_eq!(WeekdaySet::from_array([Mon, Tue]).last(), Some(Tue));
/// assert_eq!(WeekdaySet::EMPTY.last(), None);
/// ```
pub fn last(self) -> Option<Weekday> {
if self.is_empty() {
return None;
}
// Find the last non-zero bit.
let bit = 1 << (7 - self.0.leading_zeros());
Self(bit).single_day()
}
/// Split the collection in two at the given day.
///
/// Returns a tuple `(before, after)`. `before` contains all days starting from Monday
/// up to but __not__ including `weekday`. `after` contains all days starting from `weekday`
/// up to and including Sunday.
const fn split_at(self, weekday: Weekday) -> (Self, Self) {
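// `0b1000_0000 - single(weekday)` sets the bits for `weekday` and every later day;
// XOR-ing that with 0b0111_1111 yields the bits for the earlier days.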
let days_after = 0b1000_0000 - Self::single(weekday).0;
let days_before = days_after ^ 0b0111_1111;
(Self(self.0 & days_before), Self(self.0 & days_after))
}
/// Iterate over the [`Weekday`]s in the collection starting from a given day.
///
/// Wraps around from Sunday to Monday if necessary.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// let weekdays = WeekdaySet::from_array([Mon, Wed, Fri]);
/// let mut iter = weekdays.iter(Wed);
/// assert_eq!(iter.next(), Some(Wed));
/// assert_eq!(iter.next(), Some(Fri));
/// assert_eq!(iter.next(), Some(Mon));
/// assert_eq!(iter.next(), None);
/// ```
pub const fn iter(self, start: Weekday) -> WeekdaySetIter {
WeekdaySetIter { days: self, start }
}
/// Returns `true` if the collection contains the given day.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert!(WeekdaySet::single(Mon).contains(Mon));
/// assert!(WeekdaySet::from_array([Mon, Tue]).contains(Tue));
/// assert!(!WeekdaySet::single(Mon).contains(Tue));
/// ```
pub const fn contains(self, day: Weekday) -> bool {
self.0 & Self::single(day).0 != 0
}
/// Returns `true` if the collection is empty.
///
/// # Example
/// ```
/// # use chrono::{Weekday, WeekdaySet};
/// assert!(WeekdaySet::EMPTY.is_empty());
/// assert!(!WeekdaySet::single(Weekday::Mon).is_empty());
/// ```
pub const fn is_empty(self) -> bool {
self.len() == 0
}
/// Returns the number of days in the collection.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(WeekdaySet::single(Mon).len(), 1);
/// assert_eq!(WeekdaySet::from_array([Mon, Wed, Fri]).len(), 3);
/// assert_eq!(WeekdaySet::ALL.len(), 7);
/// ```
pub const fn len(self) -> u8 {
self.0.count_ones() as u8
}
/// An empty `WeekdaySet`.
pub const EMPTY: Self = Self(0b000_0000);
/// A `WeekdaySet` containing all seven `Weekday`s.
pub const ALL: Self = Self(0b111_1111);
}
/// Print the underlying bitmask, padded to 7 bits.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!(format!("{:?}", WeekdaySet::single(Mon)), "WeekdaySet(0000001)");
/// assert_eq!(format!("{:?}", WeekdaySet::single(Tue)), "WeekdaySet(0000010)");
/// assert_eq!(format!("{:?}", WeekdaySet::ALL), "WeekdaySet(1111111)");
/// ```
impl Debug for WeekdaySet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "WeekdaySet({:0>7b})", self.0)
}
}
/// An iterator over a collection of weekdays, starting from a given day.
///
/// See [`WeekdaySet::iter()`].
#[derive(Debug, Clone)]
pub struct WeekdaySetIter {
days: WeekdaySet,
start: Weekday,
}
impl Iterator for WeekdaySetIter {
type Item = Weekday;
fn next(&mut self) -> Option<Self::Item> {
if self.days.is_empty() {
return None;
}
// Split the collection in two at `start`.
// Look for the first day among the days after `start` first, including `start` itself.
// If there are no days after `start`, look for the first day among the days before `start`.
let (before, after) = self.days.split_at(self.start);
let days = if after.is_empty() { before } else { after };
let next = days.first().expect("the collection is not empty");
self.days.remove(next);
Some(next)
}
}
impl DoubleEndedIterator for WeekdaySetIter {
fn next_back(&mut self) -> Option<Self::Item> {
if self.days.is_empty() {
return None;
}
// Split the collection in two at `start`.
// Look for the last day among the days before `start` first, NOT including `start` itself.
// If there are no days before `start`, look for the last day among the days after `start`.
let (before, after) = self.days.split_at(self.start);
let days = if before.is_empty() { after } else { before };
let next_back = days.last().expect("the collection is not empty");
self.days.remove(next_back);
Some(next_back)
}
}
impl ExactSizeIterator for WeekdaySetIter {
fn len(&self) -> usize {
self.days.len().into()
}
}
impl FusedIterator for WeekdaySetIter {}
/// Print the collection as a slice-like list of weekdays.
///
/// # Example
/// ```
/// # use chrono::WeekdaySet;
/// use chrono::Weekday::*;
/// assert_eq!("[]", WeekdaySet::EMPTY.to_string());
/// assert_eq!("[Mon]", WeekdaySet::single(Mon).to_string());
/// assert_eq!("[Mon, Fri, Sun]", WeekdaySet::from_array([Mon, Fri, Sun]).to_string());
/// ```
impl fmt::Display for WeekdaySet {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
write!(f, "[")?;
let mut iter = self.iter(Weekday::Mon);
if let Some(first) = iter.next() {
write!(f, "{first}")?;
}
for weekday in iter {
write!(f, ", {weekday}")?;
}
write!(f, "]")
}
}
impl FromIterator<Weekday> for WeekdaySet {
fn from_iter<T: IntoIterator<Item = Weekday>>(iter: T) -> Self {
iter.into_iter().map(Self::single).fold(Self::EMPTY, Self::union)
}
}
#[cfg(test)]
mod tests {
use crate::Weekday;
use super::WeekdaySet;
impl WeekdaySet {
/// Iterate over all 128 possible sets, from `EMPTY` to `ALL`.
fn iter_all() -> impl Iterator<Item = Self> {
(0b0000_0000..0b1000_0000).map(Self)
}
}
/// Panics if the 8-th bit of `self` is not 0.
fn assert_8th_bit_invariant(days: WeekdaySet) {
assert!(days.0 & 0b1000_0000 == 0, "the 8-th bit of {days:?} is not 0");
}
#[test]
fn debug_prints_8th_bit_if_not_zero() {
assert_eq!(format!("{:?}", WeekdaySet(0b1000_0000)), "WeekdaySet(10000000)");
}
#[test]
fn bitwise_set_operations_preserve_8th_bit_invariant() {
for set1 in WeekdaySet::iter_all() {
for set2 in WeekdaySet::iter_all() {
assert_8th_bit_invariant(set1.union(set2));
assert_8th_bit_invariant(set1.intersection(set2));
assert_8th_bit_invariant(set1.symmetric_difference(set2));
}
}
}
/// Test `split_at` on all possible arguments.
#[test]
fn split_at_is_equivalent_to_iterating() {
use Weekday::*;
// `split_at()` is used in `iter()`, so we must not iterate
// over all days with `WeekdaySet::ALL.iter(Mon)`.
const WEEK: [Weekday; 7] = [Mon, Tue, Wed, Thu, Fri, Sat, Sun];
for weekdays in WeekdaySet::iter_all() {
for split_day in WEEK {
let expected_before: WeekdaySet = WEEK
.into_iter()
.take_while(|&day| day != split_day)
.filter(|&day| weekdays.contains(day))
.collect();
let expected_after: WeekdaySet = WEEK
.into_iter()
.skip_while(|&day| day != split_day)
.filter(|&day| weekdays.contains(day))
.collect();
assert_eq!(
(expected_before, expected_after),
weekdays.split_at(split_day),
"split_at({split_day}) failed for {weekdays}",
);
}
}
}
}

View File

@@ -0,0 +1,165 @@
#![cfg(all(unix, feature = "clock", feature = "std"))]
use std::{path, process, thread};
#[cfg(target_os = "linux")]
use chrono::Days;
use chrono::{Datelike, Local, NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Timelike};
fn verify_against_date_command_local(path: &'static str, dt: NaiveDateTime) {
let output = process::Command::new(path)
.arg("-d")
.arg(format!("{}-{:02}-{:02} {:02}:05:01", dt.year(), dt.month(), dt.day(), dt.hour()))
.arg("+%Y-%m-%d %H:%M:%S %:z")
.output()
.unwrap();
let date_command_str = String::from_utf8(output.stdout).unwrap();
// The below would be preferred. At this stage neither earliest() nor latest()
// seems to be consistent with the output of the `date` command, so we simply
// compare both.
// let local = Local
// .with_ymd_and_hms(year, month, day, hour, 5, 1)
// // looks like the "date" command always returns a given time when it is ambiguous
// .earliest();
// if let Some(local) = local {
// assert_eq!(format!("{}\n", local), date_command_str);
// } else {
// // we are in a "Spring forward gap" due to DST, and so date also returns ""
// assert_eq!("", date_command_str);
// }
// This is used while a decision is made whether the `date` output needs to
// be exactly matched, or whether MappedLocalTime::Ambiguous should be handled
// differently
let date = NaiveDate::from_ymd_opt(dt.year(), dt.month(), dt.day()).unwrap();
match Local.from_local_datetime(&date.and_hms_opt(dt.hour(), 5, 1).unwrap()) {
chrono::MappedLocalTime::Ambiguous(a, b) => assert!(
format!("{}\n", a) == date_command_str || format!("{}\n", b) == date_command_str
),
chrono::MappedLocalTime::Single(a) => {
assert_eq!(format!("{}\n", a), date_command_str);
}
chrono::MappedLocalTime::None => {
assert_eq!("", date_command_str);
}
}
}
/// Path to the Unix `date` command. Should work on most Linux and Unix systems. Not the
/// path for macOS (/bin/date), which uses a different version of `date` with
/// different arguments (so it won't run, which is okay).
/// For testing only.
#[allow(dead_code)]
#[cfg(not(target_os = "aix"))]
const DATE_PATH: &str = "/usr/bin/date";
#[allow(dead_code)]
#[cfg(target_os = "aix")]
const DATE_PATH: &str = "/opt/freeware/bin/date";
#[cfg(test)]
/// test helper to sanity check the date command behaves as expected
/// asserts the command succeeded
fn assert_run_date_version() {
// note environment variable `LANG`
match std::env::var_os("LANG") {
Some(lang) => eprintln!("LANG: {:?}", lang),
None => eprintln!("LANG not set"),
}
let out = process::Command::new(DATE_PATH).arg("--version").output().unwrap();
let stdout = String::from_utf8(out.stdout).unwrap();
let stderr = String::from_utf8(out.stderr).unwrap();
// note the `date` binary version
eprintln!("command: {:?} --version\nstdout: {:?}\nstderr: {:?}", DATE_PATH, stdout, stderr);
assert!(out.status.success(), "command failed: {:?} --version", DATE_PATH);
}
#[test]
fn try_verify_against_date_command() {
if !path::Path::new(DATE_PATH).exists() {
eprintln!("date command {:?} not found, skipping", DATE_PATH);
return;
}
assert_run_date_version();
eprintln!(
"Run command {:?} for every hour from 1975 to 2077, skipping some years...",
DATE_PATH,
);
let mut children = vec![];
for year in [1975, 1976, 1977, 2020, 2021, 2022, 2073, 2074, 2075, 2076, 2077].iter() {
children.push(thread::spawn(|| {
let mut date = NaiveDate::from_ymd_opt(*year, 1, 1).unwrap().and_time(NaiveTime::MIN);
let end = NaiveDate::from_ymd_opt(*year + 1, 1, 1).unwrap().and_time(NaiveTime::MIN);
while date <= end {
verify_against_date_command_local(DATE_PATH, date);
date += chrono::TimeDelta::try_hours(1).unwrap();
}
}));
}
for child in children {
// Wait for the thread to finish. Returns a result.
let _ = child.join();
}
}
#[cfg(target_os = "linux")]
fn verify_against_date_command_format_local(path: &'static str, dt: NaiveDateTime) {
let required_format =
"d%d D%D F%F H%H I%I j%j k%k l%l m%m M%M q%q S%S T%T u%u U%U w%w W%W X%X y%y Y%Y z%:z";
// a%a - depends on localization
// A%A - depends on localization
// b%b - depends on localization
// B%B - depends on localization
// h%h - depends on localization
// c%c - depends on localization
// p%p - depends on localization
// r%r - depends on localization
// x%x - fails, date is dd/mm/yyyy, chrono is dd/mm/yy, same as %D
// Z%Z - too many ways to represent it, will most likely fail
let output = process::Command::new(path)
.env("LANG", "c")
.env("LC_ALL", "c")
.arg("-d")
.arg(format!(
"{}-{:02}-{:02} {:02}:{:02}:{:02}",
dt.year(),
dt.month(),
dt.day(),
dt.hour(),
dt.minute(),
dt.second()
))
.arg(format!("+{}", required_format))
.output()
.unwrap();
let date_command_str = String::from_utf8(output.stdout).unwrap();
let date = NaiveDate::from_ymd_opt(dt.year(), dt.month(), dt.day()).unwrap();
let ldt = Local
.from_local_datetime(&date.and_hms_opt(dt.hour(), dt.minute(), dt.second()).unwrap())
.unwrap();
let formatted_date = format!("{}\n", ldt.format(required_format));
assert_eq!(date_command_str, formatted_date);
}
#[test]
#[cfg(target_os = "linux")]
fn try_verify_against_date_command_format() {
if !path::Path::new(DATE_PATH).exists() {
eprintln!("date command {:?} not found, skipping", DATE_PATH);
return;
}
assert_run_date_version();
let mut date = NaiveDate::from_ymd_opt(1970, 1, 1).unwrap().and_hms_opt(12, 11, 13).unwrap();
while date.year() < 2008 {
verify_against_date_command_format_local(DATE_PATH, date);
date = date + Days::new(55);
}
}

View File

@@ -0,0 +1,89 @@
//! Run this test with:
//! `env TZ="$(date +%z)" NOW="$(date +%s)" wasm-pack test --node -- --features wasmbind`
//!
//! The `TZ` and `NOW` variables are used to compare the results inside the WASM environment with
//! the host system.
//! The check will fail if the local timezone does not match one of the timezones defined below.
#![cfg(all(
target_arch = "wasm32",
feature = "wasmbind",
feature = "clock",
not(any(target_os = "emscripten", target_os = "wasi"))
))]
use chrono::prelude::*;
use wasm_bindgen_test::*;
#[wasm_bindgen_test]
fn now() {
let utc: DateTime<Utc> = Utc::now();
let local: DateTime<Local> = Local::now();
// Ensure time set by the test script is correct
let now = env!("NOW");
let actual = NaiveDateTime::parse_from_str(&now, "%s").unwrap().and_utc();
let diff = utc - actual;
assert!(
diff < chrono::TimeDelta::try_minutes(5).unwrap(),
"expected {} - {} == {} < 5m (env var: {})",
utc,
actual,
diff,
now,
);
let tz = env!("TZ");
eprintln!("testing with tz={}", tz);
// Ensure offset retrieved when getting local time is correct
let expected_offset = match tz {
"ACST-9:30" => FixedOffset::east_opt(19 * 30 * 60).unwrap(),
"Asia/Katmandu" => FixedOffset::east_opt(23 * 15 * 60).unwrap(), // No DST thankfully
"EDT" | "EST4" | "-0400" => FixedOffset::east_opt(-4 * 60 * 60).unwrap(),
"EST" | "-0500" => FixedOffset::east_opt(-5 * 60 * 60).unwrap(),
"UTC0" | "+0000" => FixedOffset::east_opt(0).unwrap(),
tz => panic!("unexpected TZ {}", tz),
};
assert_eq!(
&expected_offset,
local.offset(),
"expected: {:?} local: {:?}",
expected_offset,
local.offset(),
);
}
#[wasm_bindgen_test]
fn from_is_exact() {
let now = js_sys::Date::new_0();
let dt = DateTime::<Utc>::from(now.clone());
assert_eq!(now.get_time() as i64, dt.timestamp_millis());
}
#[wasm_bindgen_test]
fn local_from_local_datetime() {
let now = Local::now();
let ndt = now.naive_local();
let res = match Local.from_local_datetime(&ndt).single() {
Some(v) => v,
None => panic! {"Required for test!"},
};
assert_eq!(now, res);
}
#[wasm_bindgen_test]
fn convert_all_parts_with_milliseconds() {
let time: DateTime<Utc> = "2020-12-01T03:01:55.974Z".parse().unwrap();
let js_date = js_sys::Date::from(time);
assert_eq!(js_date.get_utc_full_year(), 2020);
assert_eq!(js_date.get_utc_month(), 11); // months are numbered 0..=11
assert_eq!(js_date.get_utc_date(), 1);
assert_eq!(js_date.get_utc_hours(), 3);
assert_eq!(js_date.get_utc_minutes(), 1);
assert_eq!(js_date.get_utc_seconds(), 55);
assert_eq!(js_date.get_utc_milliseconds(), 974);
}

View File

@@ -0,0 +1,28 @@
#![cfg(all(windows, feature = "clock", feature = "std"))]
use std::fs;
use windows_bindgen::bindgen;
#[test]
fn gen_bindings() {
let input = "src/offset/local/win_bindings.txt";
let output = "src/offset/local/win_bindings.rs";
let existing = fs::read_to_string(output).unwrap();
bindgen(["--no-deps", "--etc", input]).unwrap();
// Check the output is the same as before.
// Depending on the git configuration the file may have been checked out with `\r\n` newlines or
// with `\n`. Compare line-by-line to ignore this difference.
let mut new = fs::read_to_string(output).unwrap();
if existing.contains("\r\n") && !new.contains("\r\n") {
new = new.replace("\n", "\r\n");
} else if !existing.contains("\r\n") && new.contains("\r\n") {
new = new.replace("\r\n", "\n");
}
similar_asserts::assert_eq!(existing, new);
if !new.lines().eq(existing.lines()) {
panic!("generated file `{output}` is changed.");
}
}

View File

@@ -0,0 +1,35 @@
[package]
name = "dotenvy"
version = "0.15.7"
authors = [
"Noemi Lapresta <noemi.lapresta@gmail.com>",
"Craig Hills <chills@gmail.com>",
"Mike Piccolo <mfpiccolo@gmail.com>",
"Alice Maz <alice@alicemaz.com>",
"Sean Griffin <sean@seantheprogrammer.com>",
"Adam Sharp <adam@sharplet.me>",
"Arpad Borsos <arpad.borsos@googlemail.com>",
"Allan Zhang <al@ayz.ai>",
]
description = "A well-maintained fork of the dotenv crate"
homepage = "https://github.com/allan2/dotenvy"
readme = "README.md"
keywords = ["dotenv", "env", "environment", "settings", "config"]
license = "MIT"
repository = "https://github.com/allan2/dotenvy"
edition = "2018"
rust-version = "1.56.1"
# [[bin]]
# name = "dotenvy"
# required-features = ["cli"]
# [dependencies]
# clap = { version = "3.2", optional = true }
# [dev-dependencies]
# tempfile = "3.3.0"
# once_cell = "1.16.0"
# [features]
# cli = ["clap"]

View File

@@ -0,0 +1,21 @@
# The MIT License (MIT)
Copyright (c) 2014 Santiago Lapresta and contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,80 @@
# dotenvy
[![Crates.io](https://img.shields.io/crates/v/dotenvy.svg)](https://crates.io/crates/dotenvy)
[![msrv 1.56.1](https://img.shields.io/badge/msrv-1.56.1-dea584.svg?logo=rust)](https://github.com/rust-lang/rust/releases/tag/1.56.1)
[![ci](https://github.com/allan2/dotenvy/actions/workflows/ci.yml/badge.svg)](https://github.com/allan2/dotenvy/actions/workflows/ci.yml)
[![docs](https://img.shields.io/docsrs/dotenvy?logo=docs.rs)](https://docs.rs/dotenvy/)
A well-maintained fork of the [dotenv](https://github.com/dotenv-rs/dotenv) crate.
This crate is the suggested alternative for `dotenv` in security advisory [RUSTSEC-2021-0141](https://rustsec.org/advisories/RUSTSEC-2021-0141.html).
This library loads environment variables from a _.env_ file. This is convenient for dev environments.
## Components
1. [`dotenvy`](https://crates.io/crates/dotenvy) crate - A well-maintained fork of the `dotenv` crate.
2. [`dotenvy_macro`](https://crates.io/crates/dotenvy_macro) crate - A macro for compile time dotenv inspection. This is a fork of `dotenv_codegen`.
3. `dotenvy` CLI tool for running a command using the environment from a _.env_ file (currently Unix only)
## Usage
### Loading at runtime
```rust
use std::env;
use std::error::Error;
fn main() -> Result<(), Box<dyn Error>> {
// Load environment variables from .env file.
// Fails if .env file not found, not readable or invalid.
dotenvy::dotenv()?;
for (key, value) in env::vars() {
println!("{key}: {value}");
}
Ok(())
}
```
### Loading at compile time
The `dotenv!` macro provided by the `dotenvy_macro` crate can be used, as sketched below.
Warning: there is an outstanding issue with rust-analyzer ([rust-analyzer #9606](https://github.com/rust-analyzer/rust-analyzer/issues/9606)) related to the `dotenv!` macro
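A minimal sketch, assuming the _.env_ file defines a hypothetical `DATABASE_URL` entry:
```rust
use dotenvy_macro::dotenv;

fn main() {
    // Expanded at compile time; the build fails if the key is missing from .env.
    let database_url: &str = dotenv!("DATABASE_URL");
    println!("{database_url}");
}
```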
## Minimum supported Rust version
Currently: **1.56.1**
We aim to support the latest 8 rustc versions - approximately 1 year. Increasing
MSRV is _not_ considered a semver-breaking change.
## Why does this fork exist?
The original dotenv crate has not been updated since June 26, 2020. Attempts to reach the authors and present maintainer were not successful ([dotenv-rs/dotenv #74](https://github.com/dotenv-rs/dotenv/issues/74)).
This fork intends to serve as the development home for the dotenv implementation in Rust.
## What are the differences from the original?
This repo fixes:
- more helpful errors for `dotenv!` ([dotenv-rs/dotenv #57](https://github.com/dotenv-rs/dotenv/pull/57))
It also adds:
- multiline support for environment variable values
- `io::Read` support via [`from_read`](https://docs.rs/dotenvy/latest/dotenvy/fn.from_read.html) and [`from_read_iter`](https://docs.rs/dotenvy/latest/dotenvy/fn.from_read_iter.html)
- override support via [`dotenv_override`], [`from_filename_override`], [`from_path_override`] and [`from_read_override`] (see the sketch after this list)
- improved docs
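For example, a minimal sketch of the override behavior (hypothetical `LOG_LEVEL` key):
```rust
use std::{env, error::Error};

fn main() -> Result<(), Box<dyn Error>> {
    // `dotenv()` keeps variables that are already set in the process environment;
    // `dotenv_override()` lets the values from the .env file win instead.
    dotenvy::dotenv_override()?;

    println!("LOG_LEVEL = {}", env::var("LOG_LEVEL")?);
    Ok(())
}
```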
For a full list of changes, refer to the [changelog](./CHANGELOG.md).
## The legend
Legend has it that the Lost Maintainer will return, merging changes from `dotenvy` into `dotenv` with such thrust that all `Cargo.toml`s will lose one keystroke. Only then shall the Rust dotenv crateverse be united in true harmony.
Until then, this repo dutifully carries on the dotenv torch. It is actively maintained. Contributions and PRs are very welcome!

View File

@@ -0,0 +1,122 @@
use std::env;
use std::error;
use std::fmt;
use std::io;
pub type Result<T> = std::result::Result<T, Error>;
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
LineParse(String, usize),
Io(io::Error),
EnvVar(env::VarError),
}
impl Error {
pub fn not_found(&self) -> bool {
if let Error::Io(ref io_error) = *self {
return io_error.kind() == io::ErrorKind::NotFound;
}
false
}
}
impl error::Error for Error {
fn source(&self) -> Option<&(dyn error::Error + 'static)> {
match self {
Error::Io(err) => Some(err),
Error::EnvVar(err) => Some(err),
_ => None,
}
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::Io(err) => write!(fmt, "{}", err),
Error::EnvVar(err) => write!(fmt, "{}", err),
Error::LineParse(line, error_index) => write!(
fmt,
"Error parsing line: '{}', error at line index: {}",
line, error_index
),
}
}
}
#[cfg(test)]
mod test {
use std::env;
use std::error::Error as StdError;
use std::io;
use super::*;
#[test]
fn test_io_error_source() {
let err = Error::Io(io::ErrorKind::PermissionDenied.into());
let io_err = err.source().unwrap().downcast_ref::<io::Error>().unwrap();
assert_eq!(io::ErrorKind::PermissionDenied, io_err.kind());
}
#[test]
fn test_envvar_error_source() {
let err = Error::EnvVar(env::VarError::NotPresent);
let var_err = err
.source()
.unwrap()
.downcast_ref::<env::VarError>()
.unwrap();
assert_eq!(&env::VarError::NotPresent, var_err);
}
#[test]
fn test_lineparse_error_source() {
let err = Error::LineParse("test line".to_string(), 2);
assert!(err.source().is_none());
}
#[test]
fn test_error_not_found_true() {
let err = Error::Io(io::ErrorKind::NotFound.into());
assert!(err.not_found());
}
#[test]
fn test_error_not_found_false() {
let err = Error::Io(io::ErrorKind::PermissionDenied.into());
assert!(!err.not_found());
}
#[test]
fn test_io_error_display() {
let err = Error::Io(io::ErrorKind::PermissionDenied.into());
let io_err: io::Error = io::ErrorKind::PermissionDenied.into();
let err_desc = format!("{}", err);
let io_err_desc = format!("{}", io_err);
assert_eq!(io_err_desc, err_desc);
}
#[test]
fn test_envvar_error_display() {
let err = Error::EnvVar(env::VarError::NotPresent);
let var_err = env::VarError::NotPresent;
let err_desc = format!("{}", err);
let var_err_desc = format!("{}", var_err);
assert_eq!(var_err_desc, err_desc);
}
#[test]
fn test_lineparse_error_display() {
let err = Error::LineParse("test line".to_string(), 2);
let err_desc = format!("{}", err);
assert_eq!(
"Error parsing line: 'test line', error at line index: 2",
err_desc
);
}
}

View File

@@ -0,0 +1,57 @@
use std::fs::File;
use std::path::{Path, PathBuf};
use std::{env, fs, io};
use crate::errors::*;
use crate::iter::Iter;
pub struct Finder<'a> {
filename: &'a Path,
}
impl<'a> Finder<'a> {
pub fn new() -> Self {
Finder {
filename: Path::new(".env"),
}
}
pub fn filename(mut self, filename: &'a Path) -> Self {
self.filename = filename;
self
}
pub fn find(self) -> Result<(PathBuf, Iter<File>)> {
let path = find(&env::current_dir().map_err(Error::Io)?, self.filename)?;
let file = File::open(&path).map_err(Error::Io)?;
let iter = Iter::new(file);
Ok((path, iter))
}
}
/// Searches for `filename` in `directory` and parent directories until found or root is reached.
pub fn find(directory: &Path, filename: &Path) -> Result<PathBuf> {
let candidate = directory.join(filename);
match fs::metadata(&candidate) {
Ok(metadata) => {
if metadata.is_file() {
return Ok(candidate);
}
}
Err(error) => {
if error.kind() != io::ErrorKind::NotFound {
return Err(Error::Io(error));
}
}
}
if let Some(parent) = directory.parent() {
find(parent, filename)
} else {
Err(Error::Io(io::Error::new(
io::ErrorKind::NotFound,
"path not found",
)))
}
}

View File

@@ -0,0 +1,235 @@
use std::collections::HashMap;
use std::env;
use std::io::prelude::*;
use std::io::BufReader;
use crate::errors::*;
use crate::parse;
/// Result of loading environment variables
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct LoadResult {
/// Number of successfully loaded variables
pub loaded: usize,
/// Number of variables that were skipped (for `load` method only)
pub skipped: usize,
/// Number of variables that were overridden (for `load_override` method only)
pub overridden: usize,
}
pub struct Iter<R> {
lines: QuotedLines<BufReader<R>>,
substitution_data: HashMap<String, Option<String>>,
}
impl<R: Read> Iter<R> {
pub fn new(reader: R) -> Iter<R> {
Iter {
lines: QuotedLines {
buf: BufReader::new(reader),
},
substitution_data: HashMap::new(),
}
}
/// Loads all variables found in the `reader` into the environment,
/// preserving any existing environment variables of the same name.
///
/// If a variable is specified multiple times within the reader's data,
/// then the first occurrence is applied.
///
/// Returns a `LoadResult` containing the number of loaded and skipped variables.
pub fn load(mut self) -> Result<LoadResult> {
self.remove_bom()?;
let mut loaded = 0;
let mut skipped = 0;
for item in self {
let (key, value) = item?;
if env::var(&key).is_err() {
env::set_var(&key, value);
loaded += 1;
} else {
skipped += 1;
}
}
Ok(LoadResult {
loaded,
skipped,
overridden: 0,
})
}
/// Loads all variables found in the `reader` into the environment,
/// overriding any existing environment variables of the same name.
///
/// If a variable is specified multiple times within the reader's data,
/// then the last occurrence is applied.
///
/// Returns a `LoadResult` containing the number of loaded and overridden variables.
pub fn load_override(mut self) -> Result<LoadResult> {
self.remove_bom()?;
let mut loaded = 0;
let mut overridden = 0;
for item in self {
let (key, value) = item?;
if env::var(&key).is_ok() {
overridden += 1;
}
env::set_var(key, value);
loaded += 1;
}
Ok(LoadResult {
loaded,
skipped: 0,
overridden,
})
}
fn remove_bom(&mut self) -> Result<()> {
let buffer = self.lines.buf.fill_buf().map_err(Error::Io)?;
// https://www.compart.com/en/unicode/U+FEFF
if buffer.starts_with(&[0xEF, 0xBB, 0xBF]) {
// remove the BOM from the bufreader
self.lines.buf.consume(3);
}
Ok(())
}
}
struct QuotedLines<B> {
buf: B,
}
enum ParseState {
Complete,
Escape,
StrongOpen,
StrongOpenEscape,
WeakOpen,
WeakOpenEscape,
Comment,
WhiteSpace,
}
fn eval_end_state(prev_state: ParseState, buf: &str) -> (usize, ParseState) {
let mut cur_state = prev_state;
let mut cur_pos: usize = 0;
for (pos, c) in buf.char_indices() {
cur_pos = pos;
cur_state = match cur_state {
ParseState::WhiteSpace => match c {
'#' => return (cur_pos, ParseState::Comment),
'\\' => ParseState::Escape,
'"' => ParseState::WeakOpen,
'\'' => ParseState::StrongOpen,
_ => ParseState::Complete,
},
ParseState::Escape => ParseState::Complete,
ParseState::Complete => match c {
c if c.is_whitespace() && c != '\n' && c != '\r' => ParseState::WhiteSpace,
'\\' => ParseState::Escape,
'"' => ParseState::WeakOpen,
'\'' => ParseState::StrongOpen,
_ => ParseState::Complete,
},
ParseState::WeakOpen => match c {
'\\' => ParseState::WeakOpenEscape,
'"' => ParseState::Complete,
_ => ParseState::WeakOpen,
},
ParseState::WeakOpenEscape => ParseState::WeakOpen,
ParseState::StrongOpen => match c {
'\\' => ParseState::StrongOpenEscape,
'\'' => ParseState::Complete,
_ => ParseState::StrongOpen,
},
ParseState::StrongOpenEscape => ParseState::StrongOpen,
// Comments last the entire line.
ParseState::Comment => panic!("should have returned early"),
};
}
(cur_pos, cur_state)
}
impl<B: BufRead> Iterator for QuotedLines<B> {
type Item = Result<String>;
fn next(&mut self) -> Option<Result<String>> {
let mut buf = String::new();
let mut cur_state = ParseState::Complete;
let mut buf_pos;
let mut cur_pos;
loop {
buf_pos = buf.len();
match self.buf.read_line(&mut buf) {
Ok(0) => match cur_state {
ParseState::Complete => return None,
_ => {
let len = buf.len();
return Some(Err(Error::LineParse(buf, len)));
}
},
Ok(_n) => {
// Skip lines which start with a # before iteration
// This optimizes parsing a bit.
if buf.trim_start().starts_with('#') {
return Some(Ok(String::with_capacity(0)));
}
let result = eval_end_state(cur_state, &buf[buf_pos..]);
cur_pos = result.0;
cur_state = result.1;
match cur_state {
ParseState::Complete => {
if buf.ends_with('\n') {
buf.pop();
if buf.ends_with('\r') {
buf.pop();
}
}
return Some(Ok(buf));
}
ParseState::Escape
| ParseState::StrongOpen
| ParseState::StrongOpenEscape
| ParseState::WeakOpen
| ParseState::WeakOpenEscape
| ParseState::WhiteSpace => {}
ParseState::Comment => {
buf.truncate(buf_pos + cur_pos);
return Some(Ok(buf));
}
}
}
Err(e) => return Some(Err(Error::Io(e))),
}
}
}
}
impl<R: Read> Iterator for Iter<R> {
type Item = Result<(String, String)>;
fn next(&mut self) -> Option<Self::Item> {
loop {
let line = match self.lines.next() {
Some(Ok(line)) => line,
Some(Err(err)) => return Some(Err(err)),
None => return None,
};
match parse::parse_line(&line, &mut self.substitution_data) {
Ok(Some(result)) => return Some(Ok(result)),
Ok(None) => {}
Err(err) => return Some(Err(err)),
}
}
}
}

View File

@@ -0,0 +1,371 @@
//! [`dotenv`]: https://crates.io/crates/dotenv
//! A well-maintained fork of the [`dotenv`] crate
//!
//! This library loads environment variables from a *.env* file. This is convenient for dev environments.
mod errors;
mod find;
mod iter;
mod parse;
use std::env::{self, Vars};
use std::ffi::OsStr;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};
use std::sync::Once;
pub use crate::errors::*;
use crate::find::Finder;
pub use crate::iter::{Iter, LoadResult};
static START: Once = Once::new();
/// Gets the value for an environment variable.
///
/// The value is `Ok(s)` if the environment variable is present and valid unicode.
///
/// Note: this function gets values from any visible environment variable key,
/// regardless of whether a *.env* file was loaded.
///
/// # Examples:
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let value = dotenvy::var("HOME")?;
/// println!("{}", value); // prints `/home/foo`
/// # Ok(())
/// # }
/// ```
pub fn var<K: AsRef<OsStr>>(key: K) -> Result<String> {
START.call_once(|| {
dotenv().ok();
});
env::var(key).map_err(Error::EnvVar)
}
/// Returns an iterator of `(key, value)` pairs for all environment variables of the current process.
/// The returned iterator contains a snapshot of the process's environment variables at the time of invocation. Modifications to environment variables afterwards will not be reflected.
///
/// # Examples:
///
/// ```no_run
/// use std::io;
///
/// let result: Vec<(String, String)> = dotenvy::vars().collect();
/// ```
pub fn vars() -> Vars {
START.call_once(|| {
dotenv().ok();
});
env::vars()
}
/// Loads environment variables from the specified path.
///
/// If variables with the same names already exist in the environment, then their values will be
/// preserved.
///
/// Where multiple declarations for the same environment variable exist in your *.env*
/// file, the *first one* is applied.
///
/// If you wish to ensure all variables are loaded from your *.env* file, ignoring variables
/// already existing in the environment, then use [`from_path_override`] instead.
///
/// # Examples
///
/// ```no_run
/// use std::path::Path;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::from_path(Path::new("path/to/.env"))?;
/// # Ok(())
/// # }
/// ```
pub fn from_path<P: AsRef<Path>>(path: P) -> Result<()> {
let iter = Iter::new(File::open(path).map_err(Error::Io)?);
iter.load().map(|_| ())
}
/// Loads environment variables from the specified path,
/// overriding existing environment variables.
///
/// Where multiple declarations for the same environment variable exist in your *.env* file, the
/// *last one* is applied.
///
/// If you want the existing environment to take precedence,
/// or if you want to be able to override environment variables on the command line,
/// then use [`from_path`] instead.
///
/// # Examples
///
/// ```no_run
/// use std::path::Path;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::from_path_override(Path::new("path/to/.env"))?;
/// # Ok(())
/// # }
/// ```
pub fn from_path_override<P: AsRef<Path>>(path: P) -> Result<()> {
let iter = Iter::new(File::open(path).map_err(Error::Io)?);
iter.load_override().map(|_| ())
}
/// Returns an iterator over environment variables from the specified path.
///
/// # Examples
///
/// ```no_run
/// use std::path::Path;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// for item in dotenvy::from_path_iter(Path::new("path/to/.env"))? {
/// let (key, val) = item?;
/// println!("{}={}", key, val);
/// }
/// # Ok(())
/// # }
/// ```
pub fn from_path_iter<P: AsRef<Path>>(path: P) -> Result<Iter<File>> {
Ok(Iter::new(File::open(path).map_err(Error::Io)?))
}
/// Loads environment variables from the specified file.
///
/// If variables with the same names already exist in the environment, then their values will be
/// preserved.
///
/// Where multiple declarations for the same environment variable exist in your *.env*
/// file, the *first one* is applied.
///
/// If you wish to ensure all variables are loaded from your *.env* file, ignoring variables
/// already existing in the environment, then use [`from_filename_override`] instead.
///
/// # Examples
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::from_filename("custom.env")?;
/// # Ok(())
/// # }
/// ```
///
/// It is also possible to load from a typical *.env* file like so. However, using [`dotenv`] is preferred.
///
/// ```
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::from_filename(".env")?;
/// # Ok(())
/// # }
/// ```
pub fn from_filename<P: AsRef<Path>>(filename: P) -> Result<LoadResult> {
let (_path, iter) = Finder::new().filename(filename.as_ref()).find()?;
iter.load()
}
/// Loads environment variables from the specified file,
/// overriding existing environment variables.
///
/// Where multiple declarations for the same environment variable exist in your *.env* file, the
/// *last one* is applied.
///
/// If you want the existing environment to take precedence,
/// or if you want to be able to override environment variables on the command line,
/// then use [`from_filename`] instead.
///
/// # Examples
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::from_filename_override("custom.env")?;
/// # Ok(())
/// # }
/// ```
///
/// It is also possible to load from a typical *.env* file like so. However, using [`dotenv_override`] is preferred.
///
/// ```
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::from_filename_override(".env")?;
/// # Ok(())
/// # }
/// ```
pub fn from_filename_override<P: AsRef<Path>>(filename: P) -> Result<LoadResult> {
let (_path, iter) = Finder::new().filename(filename.as_ref()).find()?;
iter.load_override()
}
/// Returns an iterator over environment variables from the specified file.
///
/// # Examples
///
/// ```no_run
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// for item in dotenvy::from_filename_iter("custom.env")? {
/// let (key, val) = item?;
/// println!("{}={}", key, val);
/// }
/// # Ok(())
/// # }
/// ```
pub fn from_filename_iter<P: AsRef<Path>>(filename: P) -> Result<Iter<File>> {
let (_, iter) = Finder::new().filename(filename.as_ref()).find()?;
Ok(iter)
}
/// Loads environment variables from [`io::Read`](std::io::Read).
///
/// This is useful for loading environment variables from IPC or the network.
///
/// If variables with the same names already exist in the environment, then their values will be
/// preserved.
///
/// Where multiple declarations for the same environment variable exist in your `reader`,
/// the *first one* is applied.
///
/// If you wish to ensure all variables are loaded from your `reader`, ignoring variables
/// already existing in the environment, then use [`from_read_override`] instead.
///
/// For regular files, use [`from_path`] or [`from_filename`].
///
/// # Examples
///
/// ```no_run
/// # #![cfg(unix)]
/// use std::io::Read;
/// use std::os::unix::net::UnixStream;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut stream = UnixStream::connect("/some/socket")?;
/// dotenvy::from_read(stream)?;
/// # Ok(())
/// # }
/// ```
pub fn from_read<R: io::Read>(reader: R) -> Result<()> {
let iter = Iter::new(reader);
iter.load()?;
Ok(())
}
/// Loads environment variables from [`io::Read`](std::io::Read),
/// overriding existing environment variables.
///
/// This is useful for loading environment variables from IPC or the network.
///
/// Where multiple declarations for the same environment variable exist in your `reader`, the
/// *last one* is applied.
///
/// If you want the existing environment to take precedence,
/// or if you want to be able to override environment variables on the command line,
/// then use [`from_read`] instead.
///
/// For regular files, use [`from_path_override`] or [`from_filename_override`].
///
/// # Examples
/// ```no_run
/// # #![cfg(unix)]
/// use std::io::Read;
/// use std::os::unix::net::UnixStream;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut stream = UnixStream::connect("/some/socket")?;
/// dotenvy::from_read_override(stream)?;
/// # Ok(())
/// # }
/// ```
pub fn from_read_override<R: io::Read>(reader: R) -> Result<()> {
let iter = Iter::new(reader);
iter.load_override()?;
Ok(())
}
/// Returns an iterator over environment variables from [`io::Read`](std::io::Read).
///
/// # Examples
///
/// ```no_run
/// # #![cfg(unix)]
/// use std::io::Read;
/// use std::os::unix::net::UnixStream;
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let mut stream = UnixStream::connect("/some/socket")?;
///
/// for item in dotenvy::from_read_iter(stream) {
/// let (key, val) = item?;
/// println!("{}={}", key, val);
/// }
/// # Ok(())
/// # }
/// ```
pub fn from_read_iter<R: io::Read>(reader: R) -> Iter<R> {
Iter::new(reader)
}
/// Loads the *.env* file from the current directory or parents. This is typically what you want.
///
/// If variables with the same names already exist in the environment, then their values will be
/// preserved.
///
/// Where multiple declarations for the same environment variable exist in your *.env*
/// file, the *first one* is applied.
///
/// If you wish to ensure all variables are loaded from your *.env* file, ignoring variables
/// already existing in the environment, then use [`dotenv_override`] instead.
///
/// An error will be returned if the file is not found.
///
/// # Examples
///
/// ```
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::dotenv()?;
/// # Ok(())
/// # }
/// ```
pub fn dotenv() -> Result<PathBuf> {
let (path, iter) = Finder::new().find()?;
iter.load()?;
Ok(path)
}
/// Loads all variables found in the `reader` into the environment,
/// overriding any existing environment variables of the same name.
///
/// Where multiple declarations for the same environment variable exist in your *.env* file, the
/// *last one* is applied.
///
/// If you want the existing environment to take precedence,
/// or if you want to be able to override environment variables on the command line,
/// then use [`dotenv`] instead.
///
/// # Examples
/// ```
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// dotenvy::dotenv_override()?;
/// # Ok(())
/// # }
/// ```
pub fn dotenv_override() -> Result<PathBuf> {
let (path, iter) = Finder::new().find()?;
iter.load_override()?;
Ok(path)
}
/// Returns an iterator over environment variables.
///
/// # Examples
///
/// ```
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// for item in dotenvy::dotenv_iter()? {
/// let (key, val) = item?;
/// println!("{}={}", key, val);
/// }
/// # Ok(())
/// # }
/// ```
pub fn dotenv_iter() -> Result<iter::Iter<File>> {
let (_, iter) = Finder::new().find()?;
Ok(iter)
}

View File

@@ -0,0 +1,653 @@
use std::collections::HashMap;
use std::env;
use crate::errors::*;
// for readability's sake
pub type ParsedLine = Result<Option<(String, String)>>;
pub fn parse_line(
line: &str,
substitution_data: &mut HashMap<String, Option<String>>,
) -> ParsedLine {
let mut parser = LineParser::new(line, substitution_data);
parser.parse_line()
}
struct LineParser<'a> {
original_line: &'a str,
substitution_data: &'a mut HashMap<String, Option<String>>,
line: &'a str,
pos: usize,
}
impl<'a> LineParser<'a> {
fn new(
line: &'a str,
substitution_data: &'a mut HashMap<String, Option<String>>,
) -> LineParser<'a> {
LineParser {
original_line: line,
substitution_data,
line: line.trim_end(), // we don't want trailing whitespace
pos: 0,
}
}
fn err(&self) -> Error {
Error::LineParse(self.original_line.into(), self.pos)
}
fn parse_line(&mut self) -> ParsedLine {
self.skip_whitespace();
// if it's an empty line or a comment, skip it
if self.line.is_empty() || self.line.starts_with('#') {
return Ok(None);
}
let mut key = self.parse_key()?;
self.skip_whitespace();
// export can be either an optional prefix or a key itself
if key == "export" {
// here we check for an optional `=`; below we throw directly when it's not found.
if self.expect_equal().is_err() {
key = self.parse_key()?;
self.skip_whitespace();
self.expect_equal()?;
}
} else {
self.expect_equal()?;
}
self.skip_whitespace();
if self.line.is_empty() || self.line.starts_with('#') {
self.substitution_data.insert(key.clone(), None);
return Ok(Some((key, String::new())));
}
let parsed_value = parse_value(self.line, self.substitution_data)?;
self.substitution_data
.insert(key.clone(), Some(parsed_value.clone()));
Ok(Some((key, parsed_value)))
}
fn parse_key(&mut self) -> Result<String> {
if !self
.line
.starts_with(|c: char| c.is_ascii_alphabetic() || c == '_')
{
return Err(self.err());
}
let index = match self
.line
.find(|c: char| !(c.is_ascii_alphanumeric() || c == '_' || c == '.'))
{
Some(index) => index,
None => self.line.len(),
};
self.pos += index;
let key = String::from(&self.line[..index]);
self.line = &self.line[index..];
Ok(key)
}
fn expect_equal(&mut self) -> Result<()> {
if !self.line.starts_with('=') {
return Err(self.err());
}
self.line = &self.line[1..];
self.pos += 1;
Ok(())
}
fn skip_whitespace(&mut self) {
if let Some(index) = self.line.find(|c: char| !c.is_whitespace()) {
self.pos += index;
self.line = &self.line[index..];
} else {
self.pos += self.line.len();
self.line = "";
}
}
}
#[derive(Eq, PartialEq)]
enum SubstitutionMode {
None,
Block,
EscapedBlock,
}
fn parse_value(
input: &str,
substitution_data: &mut HashMap<String, Option<String>>,
) -> Result<String> {
let mut strong_quote = false; // '
let mut weak_quote = false; // "
let mut escaped = false;
let mut expecting_end = false;
//FIXME can this be done without yet another allocation per line?
let mut output = String::new();
let mut substitution_mode = SubstitutionMode::None;
let mut substitution_name = String::new();
for (index, c) in input.chars().enumerate() {
//the regex _should_ already trim whitespace off the end
//expecting_end is meant to permit: k=v #comment
//without affecting: k=v#comment
//and throwing on: k=v w
if expecting_end {
if c == ' ' || c == '\t' {
continue;
} else if c == '#' {
break;
} else {
return Err(Error::LineParse(input.to_owned(), index));
}
} else if escaped {
//TODO I tried handling literal \r but various issues
//imo not worth worrying about until there's a use case
//(actually handling backslash 0x10 would be a whole other matter)
//then there's \v \f bell hex... etc
match c {
'\\' | '\'' | '"' | '$' | ' ' => output.push(c),
'n' => output.push('\n'), // handle \n case
_ => {
return Err(Error::LineParse(input.to_owned(), index));
}
}
escaped = false;
} else if strong_quote {
if c == '\'' {
strong_quote = false;
} else {
output.push(c);
}
} else if substitution_mode != SubstitutionMode::None {
if c.is_alphanumeric() {
substitution_name.push(c);
} else {
match substitution_mode {
SubstitutionMode::None => unreachable!(),
SubstitutionMode::Block => {
if c == '{' && substitution_name.is_empty() {
substitution_mode = SubstitutionMode::EscapedBlock;
} else {
apply_substitution(
substitution_data,
&substitution_name.drain(..).collect::<String>(),
&mut output,
);
if c == '$' {
substitution_mode = if !strong_quote && !escaped {
SubstitutionMode::Block
} else {
SubstitutionMode::None
}
} else {
substitution_mode = SubstitutionMode::None;
output.push(c);
}
}
}
SubstitutionMode::EscapedBlock => {
if c == '}' {
substitution_mode = SubstitutionMode::None;
apply_substitution(
substitution_data,
&substitution_name.drain(..).collect::<String>(),
&mut output,
);
} else {
substitution_name.push(c);
}
}
}
}
} else if c == '$' {
substitution_mode = if !strong_quote && !escaped {
SubstitutionMode::Block
} else {
SubstitutionMode::None
}
} else if weak_quote {
if c == '"' {
weak_quote = false;
} else if c == '\\' {
escaped = true;
} else {
output.push(c);
}
} else if c == '\'' {
strong_quote = true;
} else if c == '"' {
weak_quote = true;
} else if c == '\\' {
escaped = true;
} else if c == ' ' || c == '\t' {
expecting_end = true;
} else {
output.push(c);
}
}
//XXX also fail if escaped? or...
if substitution_mode == SubstitutionMode::EscapedBlock || strong_quote || weak_quote {
let value_length = input.len();
Err(Error::LineParse(
input.to_owned(),
if value_length == 0 {
0
} else {
value_length - 1
},
))
} else {
apply_substitution(
substitution_data,
&substitution_name.drain(..).collect::<String>(),
&mut output,
);
Ok(output)
}
}
fn apply_substitution(
substitution_data: &mut HashMap<String, Option<String>>,
substitution_name: &str,
output: &mut String,
) {
if let Ok(environment_value) = env::var(substitution_name) {
output.push_str(&environment_value);
} else {
let stored_value = substitution_data
.get(substitution_name)
.unwrap_or(&None)
.to_owned();
output.push_str(&stored_value.unwrap_or_default());
};
}
#[cfg(test)]
mod test {
use crate::iter::Iter;
use super::*;
#[test]
fn test_parse_line_env() {
// Note 5 spaces after 'KEY8=' below
let actual_iter = Iter::new(
r#"
KEY=1
KEY2="2"
KEY3='3'
KEY4='fo ur'
KEY5="fi ve"
KEY6=s\ ix
KEY7=
KEY8=
KEY9= # foo
KEY10 ="whitespace before ="
KEY11= "whitespace after ="
export="export as key"
export SHELL_LOVER=1
"#
.as_bytes(),
);
let expected_iter = vec![
("KEY", "1"),
("KEY2", "2"),
("KEY3", "3"),
("KEY4", "fo ur"),
("KEY5", "fi ve"),
("KEY6", "s ix"),
("KEY7", ""),
("KEY8", ""),
("KEY9", ""),
("KEY10", "whitespace before ="),
("KEY11", "whitespace after ="),
("export", "export as key"),
("SHELL_LOVER", "1"),
]
.into_iter()
.map(|(key, value)| (key.to_string(), value.to_string()));
let mut count = 0;
for (expected, actual) in expected_iter.zip(actual_iter) {
assert!(actual.is_ok());
assert_eq!(expected, actual.unwrap());
count += 1;
}
assert_eq!(count, 13);
}
#[test]
fn test_parse_line_comment() {
let result: Result<Vec<(String, String)>> = Iter::new(
r#"
# foo=bar
# "#
.as_bytes(),
)
.collect();
assert!(result.unwrap().is_empty());
}
#[test]
fn test_parse_line_invalid() {
// Note 4 spaces after 'invalid' below
let actual_iter = Iter::new(
r#"
invalid
very bacon = yes indeed
=value"#
.as_bytes(),
);
let mut count = 0;
for actual in actual_iter {
assert!(actual.is_err());
count += 1;
}
assert_eq!(count, 3);
}
#[test]
fn test_parse_value_escapes() {
let actual_iter = Iter::new(
r#"
KEY=my\ cool\ value
KEY2=\$sweet
KEY3="awesome stuff \"mang\""
KEY4='sweet $\fgs'\''fds'
KEY5="'\"yay\\"\ "stuff"
KEY6="lol" #well you see when I say lol wh
KEY7="line 1\nline 2"
"#
.as_bytes(),
);
let expected_iter = vec![
("KEY", r#"my cool value"#),
("KEY2", r#"$sweet"#),
("KEY3", r#"awesome stuff "mang""#),
("KEY4", r#"sweet $\fgs'fds"#),
("KEY5", r#"'"yay\ stuff"#),
("KEY6", "lol"),
("KEY7", "line 1\nline 2"),
]
.into_iter()
.map(|(key, value)| (key.to_string(), value.to_string()));
for (expected, actual) in expected_iter.zip(actual_iter) {
assert!(actual.is_ok());
assert_eq!(expected, actual.unwrap());
}
}
#[test]
fn test_parse_value_escapes_invalid() {
let actual_iter = Iter::new(
r#"
KEY=my uncool value
KEY2="why
KEY3='please stop''
KEY4=h\8u
"#
.as_bytes(),
);
for actual in actual_iter {
assert!(actual.is_err());
}
}
}
#[cfg(test)]
mod variable_substitution_tests {
use crate::iter::Iter;
use std::env;
fn assert_parsed_string(input_string: &str, expected_parse_result: Vec<(&str, &str)>) {
let actual_iter = Iter::new(input_string.as_bytes());
let expected_count = &expected_parse_result.len();
let expected_iter = expected_parse_result
.into_iter()
.map(|(key, value)| (key.to_string(), value.to_string()));
let mut count = 0;
for (expected, actual) in expected_iter.zip(actual_iter) {
assert!(actual.is_ok());
assert_eq!(expected, actual.unwrap());
count += 1;
}
assert_eq!(count, *expected_count);
}
#[test]
fn variable_in_parenthesis_surrounded_by_quotes() {
assert_parsed_string(
r#"
KEY=test
KEY1="${KEY}"
"#,
vec![("KEY", "test"), ("KEY1", "test")],
);
}
#[test]
fn substitute_undefined_variables_to_empty_string() {
assert_parsed_string(r#"KEY=">$KEY1<>${KEY2}<""#, vec![("KEY", "><><")]);
}
#[test]
fn do_not_substitute_variables_with_dollar_escaped() {
assert_parsed_string(
"KEY=>\\$KEY1<>\\${KEY2}<",
vec![("KEY", ">$KEY1<>${KEY2}<")],
);
}
#[test]
fn do_not_substitute_variables_in_weak_quotes_with_dollar_escaped() {
assert_parsed_string(
r#"KEY=">\$KEY1<>\${KEY2}<""#,
vec![("KEY", ">$KEY1<>${KEY2}<")],
);
}
#[test]
fn do_not_substitute_variables_in_strong_quotes() {
assert_parsed_string("KEY='>${KEY1}<>$KEY2<'", vec![("KEY", ">${KEY1}<>$KEY2<")]);
}
#[test]
fn same_variable_reused() {
assert_parsed_string(
r#"
KEY=VALUE
KEY1=$KEY$KEY
"#,
vec![("KEY", "VALUE"), ("KEY1", "VALUEVALUE")],
);
}
#[test]
fn with_dot() {
assert_parsed_string(
r#"
KEY.Value=VALUE
"#,
vec![("KEY.Value", "VALUE")],
);
}
#[test]
fn recursive_substitution() {
assert_parsed_string(
r#"
KEY=${KEY1}+KEY_VALUE
KEY1=${KEY}+KEY1_VALUE
"#,
vec![("KEY", "+KEY_VALUE"), ("KEY1", "+KEY_VALUE+KEY1_VALUE")],
);
}
#[test]
fn variable_without_parenthesis_is_substituted_before_separators() {
assert_parsed_string(
r#"
KEY1=test_user
KEY1_1=test_user_with_separator
KEY=">$KEY1_1<>$KEY1}<>$KEY1{<"
"#,
vec![
("KEY1", "test_user"),
("KEY1_1", "test_user_with_separator"),
("KEY", ">test_user_1<>test_user}<>test_user{<"),
],
);
}
#[test]
fn substitute_variable_from_env_variable() {
env::set_var("KEY11", "test_user_env");
assert_parsed_string(r#"KEY=">${KEY11}<""#, vec![("KEY", ">test_user_env<")]);
}
#[test]
fn substitute_variable_env_variable_overrides_dotenv_in_substitution() {
env::set_var("KEY11", "test_user_env");
assert_parsed_string(
r#"
KEY11=test_user
KEY=">${KEY11}<"
"#,
vec![("KEY11", "test_user"), ("KEY", ">test_user_env<")],
);
}
#[test]
fn consequent_substitutions() {
assert_parsed_string(
r#"
KEY1=test_user
KEY2=$KEY1_2
KEY=>${KEY1}<>${KEY2}<
"#,
vec![
("KEY1", "test_user"),
("KEY2", "test_user_2"),
("KEY", ">test_user<>test_user_2<"),
],
);
}
#[test]
fn consequent_substitutions_with_one_missing() {
assert_parsed_string(
r#"
KEY2=$KEY1_2
KEY=>${KEY1}<>${KEY2}<
"#,
vec![("KEY2", "_2"), ("KEY", "><>_2<")],
);
}
}
#[cfg(test)]
mod error_tests {
use crate::errors::Error::LineParse;
use crate::iter::Iter;
#[test]
fn should_not_parse_unfinished_substitutions() {
let wrong_value = ">${KEY{<";
let parsed_values: Vec<_> = Iter::new(
format!(
r#"
KEY=VALUE
KEY1={}
"#,
wrong_value
)
.as_bytes(),
)
.collect();
assert_eq!(parsed_values.len(), 2);
if let Ok(first_line) = &parsed_values[0] {
assert_eq!(first_line, &(String::from("KEY"), String::from("VALUE")))
} else {
panic!("Expected the first value to be parsed")
}
if let Err(LineParse(second_value, index)) = &parsed_values[1] {
assert_eq!(second_value, wrong_value);
assert_eq!(*index, wrong_value.len() - 1)
} else {
panic!("Expected the second value not to be parsed")
}
}
#[test]
fn should_not_allow_dot_as_first_character_of_key() {
let wrong_key_value = ".Key=VALUE";
let parsed_values: Vec<_> = Iter::new(wrong_key_value.as_bytes()).collect();
assert_eq!(parsed_values.len(), 1);
if let Err(LineParse(second_value, index)) = &parsed_values[0] {
assert_eq!(second_value, wrong_key_value);
assert_eq!(*index, 0)
} else {
panic!("Expected the second value not to be parsed")
}
}
#[test]
fn should_not_parse_illegal_format() {
let wrong_format = r"<><><>";
let parsed_values: Vec<_> = Iter::new(wrong_format.as_bytes()).collect();
assert_eq!(parsed_values.len(), 1);
if let Err(LineParse(wrong_value, index)) = &parsed_values[0] {
assert_eq!(wrong_value, wrong_format);
assert_eq!(*index, 0)
} else {
panic!("Expected the second value not to be parsed")
}
}
#[test]
fn should_not_parse_illegal_escape() {
let wrong_escape = r">\f<";
let parsed_values: Vec<_> =
Iter::new(format!("VALUE={}", wrong_escape).as_bytes()).collect();
assert_eq!(parsed_values.len(), 1);
if let Err(LineParse(wrong_value, index)) = &parsed_values[0] {
assert_eq!(wrong_value, wrong_escape);
assert_eq!(*index, wrong_escape.find('\\').unwrap() + 1)
} else {
panic!("Expected the second value not to be parsed")
}
}
}

patch/h2-0.4.10/.gitignore vendored Normal file
View File

@@ -0,0 +1,10 @@
target
Cargo.lock
h2spec
# These are backup files generated by rustfmt
**/*.rs.bk
# Files generated by honggfuzz
hfuzz_target
hfuzz_workspace

View File

@@ -0,0 +1,377 @@
# 0.4.11 (June 30, 2025)
* Fix client to not return an error when a clean shutdown is missing its TLS close_notify, which some servers don't bother sending.
# 0.4.10 (May 5, 2025)
* Fix `is_end_stream()` to return true only when ended cleanly, not when errored.
# 0.4.9 (April 14, 2025)
* Add `server::Connection::has_streams()` method to check for active streams.
# 0.4.8 (February 18, 2025)
* Fix handling of implicit stream resets so they are applied at the correct time.
* Fix window size decrements of send-closed streams.
* Fix reclaiming of reserved capacity when streams are closed.
* Fix to no longer call `poll_flush` after `poll_shutdown`.
* Fix busy loop in task when poll_shutdown returns pending.
# 0.4.7 (November 19, 2024)
* Fix treating HEADERS frames with a non-zero content-length but END_STREAM flag as malformed.
* Fix notifying the stream task when automatically reset on receipt of a stream error.
# 0.4.6 (August 19, 2024)
* Add `current_max_send_streams()` and `current_max_recv_streams()` to `client::SendRequest`.
* Fix sending a PROTOCOL_ERROR instead of REFUSED_STREAM when receiving oversized headers.
* Fix notifying a PushPromise task properly.
* Fix notifying a stream task when reset.
# 0.4.5 (May 17, 2024)
* Fix race condition that sometimes hung connections during shutdown.
* Fix pseudo header construction for CONNECT and OPTIONS requests.
# 0.4.4 (April 3, 2024)
* Limit number of CONTINUATION frames for misbehaving connections.
# 0.4.3 (March 15, 2024)
* Fix flow control limits to not apply until receiving SETTINGS ack.
* Fix not returning an error if IO ended without `close_notify`.
* Improve performance of decoding many headers.
# 0.4.2 (January 17th, 2024)
* Limit error resets for misbehaving connections.
* Fix selecting MAX_CONCURRENT_STREAMS value if no value is advertised initially.
# 0.4.1 (January 8, 2024)
* Fix assigning connection capacity which could starve streams in some instances.
# 0.4.0 (November 15, 2023)
* Update to `http` 1.0.
* Remove deprecated `Server::poll_close()`.
# 0.3.22 (November 15, 2023)
* Add `header_table_size(usize)` option to client and server builders.
* Improve throughput when vectored IO is not available.
* Update indexmap to 2.
# 0.3.21 (August 21, 2023)
* Fix opening of new streams over peer's max concurrent limit.
* Fix `RecvStream` to return data even if it has received a `CANCEL` stream error.
* Update MSRV to 1.63.
# 0.3.20 (June 26, 2023)
* Fix panic if a server received a request with a `:status` pseudo header in the 1xx range.
* Fix panic if a reset stream had pending push promises that were more than allowed.
* Fix potential flow control overflow by subtraction, instead returning a connection error.
# 0.3.19 (May 12, 2023)
* Fix counting reset streams when triggered by a GOAWAY.
* Send `too_many_resets` in opaque debug data of GOAWAY when too many resets received.
# 0.3.18 (April 17, 2023)
* Fix panic because of opposite check in `is_remote_local()`.
# 0.3.17 (April 13, 2023)
* Add `Error::is_library()` method to check if the error originated inside `h2`.
* Add `max_pending_accept_reset_streams(usize)` option to client and server
builders.
* Fix theoretical memory growth when receiving too many HEADERS and then
RST_STREAM frames faster than an application can accept them off the queue.
(CVE-2023-26964)
# 0.3.16 (February 27, 2023)
* Set `Protocol` extension on requests when received Extended CONNECT requests.
* Remove the `B: Unpin + 'static` bound requirement on bufs.
* Fix releasing of frames when stream is finished, reducing memory usage.
* Fix panic when trying to send data and connection window is available, but stream window is not.
* Fix spurious wakeups when stream capacity is not available.
# 0.3.15 (October 21, 2022)
* Remove `B: Buf` bound on `SendStream`'s parameter
* add accessor for `StreamId` u32
# 0.3.14 (August 16, 2022)
* Add `Error::is_reset` function.
* Bump MSRV to Rust 1.56.
* Return `RST_STREAM(NO_ERROR)` when the server early responds.
# 0.3.13 (March 31, 2022)
* Update private internal `tokio-util` dependency.
# 0.3.12 (March 9, 2022)
* Avoid time operations that can panic (#599)
* Bump MSRV to Rust 1.49 (#606)
* Fix header decoding error when a header name is contained at a continuation
header boundary (#589)
* Remove I/O type names from handshake `tracing` spans (#608)
# 0.3.11 (January 26, 2022)
* Make `SendStream::poll_capacity` never return `Ok(Some(0))` (#596)
* Fix panic when receiving already reset push promise (#597)
# 0.3.10 (January 6, 2022)
* Add `Error::is_go_away()` and `Error::is_remote()` methods.
* Fix panic if receiving malformed PUSH_PROMISE with stream ID of 0.
# 0.3.9 (December 9, 2021)
* Fix hang related to new `max_send_buffer_size`.
# 0.3.8 (December 8, 2021)
* Add "extended CONNECT support". Adds `h2::ext::Protocol`, which is used for request and response extensions to connect new protocols over an HTTP/2 stream.
* Add `max_send_buffer_size` options to client and server builders, and a default of ~400MB. This acts like a high-water mark for the `poll_capacity()` method.
* Fix panic if receiving malformed HEADERS with stream ID of 0.
# 0.3.7 (October 22, 2021)
* Fix panic if server sends a malformed frame on a stream client was about to open.
* Fix server to treat `:status` in a request as a stream error instead of connection error.
# 0.3.6 (September 30, 2021)
* Fix regression where `h2::Error`s created via `From<h2::Reason>` did not return their reason code in `Error::reason()`.
# 0.3.5 (September 29, 2021)
* Fix sending of very large headers. Previously when a single header was too big to fit in a single `HEADERS` frame, an error was returned. Now it is broken up and sent correctly.
* Fix buffered data field to be a bigger integer size.
* Refactor error format to include what initiated the error (remote, local, or user), if it was a stream or connection-level error, and any received debug data.
# 0.3.4 (August 20, 2021)
* Fix panic when encoding header size update over a certain size.
* Fix `SendRequest` to wake up connection when dropped.
* Fix potential hang if `RecvStream` is placed in the request or response `extensions`.
* Stop calling `Instant::now` if zero reset streams are configured.
# 0.3.3 (April 29, 2021)
* Fix client being able to make `CONNECT` requests without a `:path`.
* Expose `RecvStream::poll_data`.
* Fix some docs.
# 0.3.2 (March 24, 2021)
* Fix incorrect handling of received 1xx responses on the client when the request body is still streaming.
# 0.3.1 (February 26, 2021)
* Add `Connection::max_concurrent_recv_streams()` getter.
* Add `Connection::max_concurrent_send_streams()` getter.
* Fix client to ignore receipt of 1xx headers frames.
* Fix incorrect calculation of pseudo header lengths when determining if a received header is too big.
* Reduce monomorphized code size of internal code.
# 0.3.0 (December 23, 2020)
* Update to Tokio v1 and Bytes v1.
* Disable `tracing`'s `log` feature. (It can still be enabled by a user in their own `Cargo.toml`.)
# 0.2.7 (October 22, 2020)
* Fix stream ref count when sending a push promise
* Fix receiving empty DATA frames in response to a HEAD request
* Fix handling of client disabling SERVER_PUSH
# 0.2.6 (July 13, 2020)
* Integrate `tracing` directly where `log` was used. (For 0.2.x, `log`s are still emitted by default.)
# 0.2.5 (May 6, 2020)
* Fix rare debug assert failure in store shutdown.
# 0.2.4 (March 30, 2020)
* Fix when receiving `SETTINGS_HEADER_TABLE_SIZE` setting.
# 0.2.3 (March 25, 2020)
* Fix server being able to accept `CONNECT` requests without `:scheme` or `:path`.
* Fix receiving a GOAWAY frame so it no longer updates the recv max ID; it should only update the max send ID.
# 0.2.2 (March 3, 2020)
* Reduce size of `FlowControl` and `RecvStream`.
# 0.2.1 (December 6, 2019)
* Relax `Unpin` bounds on the send `Buf` generic.
# 0.2.0 (December 3, 2019)
* Add `server::Connection::set_initial_window_size` and `client::Connection::set_initial_window_size` which can adjust the `INITIAL_WINDOW_SIZE` setting on an existing connection (#421).
* Update to `http` v0.2.
* Update to `tokio` v0.2.
* Change `unstable-stream` feature to `stream`.
* Change `ReserveCapacity` to `FlowControl` (#423).
* Remove `From<io::Error>` for `Error`.
# 0.2.0-alpha.3 (October 1, 2019)
* Update to futures `0.3.0-alpha.19`.
* Update to tokio `0.2.0-alpha.6`.
# 0.2.0-alpha.2 (September 20, 2019)
* Add server support for `PUSH_PROMISE`s (#327).
* Update to tokio `0.2.0-alpha.5`.
* Change `stream` feature to `unstable-stream`.
# 0.2.0-alpha.1 (August 30, 2019)
* Update from `futures` 0.1 to `std::future::Future`.
* Update `AsyncRead`/`AsyncWrite` to `tokio-io` 0.2 alpha.
* Change `Stream` implementations to be optional, default disabled. Specific async and poll functions are now inherent, and `Stream` can be re-enabled with the `stream` cargo feature.
# 0.1.25 (June 28, 2019)
* Fix to send a `RST_STREAM` instead of `GOAWAY` if receiving a frame on a previously closed stream.
* Fix receiving trailers without an end-stream flag to be a stream error instead of connection error.
# 0.1.24 (June 17, 2019)
* Fix server wrongly rejecting requests that don't have an `:authority` header (#372).
# 0.1.23 (June 4, 2019)
* Fix leaking of received DATA frames if the `RecvStream` is never polled (#368).
# 0.1.22 (June 3, 2019)
* Fix rare panic when remote sends `RST_STREAM` or `GOAWAY` for a stream pending window capacity (#364).
# 0.1.21 (May 30, 2019)
* Fix write loop when a header didn't fit in write buffer.
# 0.1.20 (May 16, 2019)
* Fix lifetime conflict for older compilers.
# 0.1.19 (May 15, 2019)
* Fix rare crash if `CONTINUATION` frame resumed in the middle of headers with the same name.
* Fix HPACK encoder using an old evicted index for repeated header names.
# 0.1.18 (April 9, 2019)
* Fix `server::Connection::abrupt_shutdown` to no longer return the same error the user sent (#352).
# 0.1.17 (March 12, 2019)
* Add user PING support (#346).
* Fix notifying a `RecvStream` task if locally sending a reset.
* Fix connections "hanging" when all handles are dropped but some streams had been reset.
# 0.1.16 (January 24, 2019)
* Log header values when malformed (#342).
# 0.1.15 (January 12, 2019)
* Fix race condition bug related to shutting down the client (#338).
# 0.1.14 (December 5, 2018)
* Fix closed streams to always return window capacity to the connection (#334).
* Fix locking when `Debug` printing an `OpaqueStreamRef` (#333).
* Fix inverted split for DATA frame padding (#330).
* Reduce `Debug` noise for `Frame` (#329).
# 0.1.13 (October 16, 2018)
* Add client support for Push Promises (#314).
* Expose `io::Error` from `h2::Error` (#311)
* Misc bug fixes (#304, #309, #319, #313, #320).
# 0.1.12 (August 8, 2018)
* Fix initial send window size (#301).
* Fix panic when calling `reserve_capacity` after connection has been closed (#302).
* Fix handling of incoming `SETTINGS_INITIAL_WINDOW_SIZE`. (#299)
# 0.1.11 (July 31, 2018)
* Add `stream_id` accessors to public API types (#292).
* Fix potential panic when dropping clients (#295).
* Fix busy loop when shutting down server (#296).
# 0.1.10 (June 15, 2018)
* Fix potential panic in `SendRequest::poll_ready()` (#281).
* Fix infinite loop on reset connection during prefix (#285).
# 0.1.9 (May 31, 2018)
* Add `poll_reset` to `SendResponse` and `SendStream` (#279).
# 0.1.8 (May 23, 2018)
* Fix client bug when max streams is reached. (#277)
# 0.1.7 (May 14, 2018)
* Misc bug fixes (#266, #273, #261, #275).
# 0.1.6 (April 24, 2018)
* Misc bug fixes related to stream management (#258, #260, #262).
# 0.1.5 (April 6, 2018)
* Fix the `last_stream_id` sent during graceful GOAWAY (#254).
# 0.1.4 (April 5, 2018)
* Add `initial_connection_window_size` to client and server `Builder`s (#249).
* Add `graceful_shutdown` and `abrupt_shutdown` to `server::Connection`,
deprecating `close_connection` (#250).
# 0.1.3 (March 28, 2018)
* Allow configuring max streams before the peer's settings frame is
received (#242).
* Fix HPACK decoding bug with regards to large literals (#244).
* Fix state transition bug triggered by receiving a RST_STREAM frame (#247).
# 0.1.2 (March 13, 2018)
* Fix another bug relating to resetting connections and reaching
max concurrency (#238).
# 0.1.1 (March 8, 2018)
* When streams are dropped, close the connection (#222).
* Notify send tasks on connection error (#231).
* Fix bug relating to resetting connections and reaching max concurrency (#235).
* Normalize HTTP request path to satisfy HTTP/2.0 specification (#228).
* Update internal dependencies.
# 0.1.0 (Jan 12, 2018)
* Initial release

View File

@@ -0,0 +1,84 @@
# Contributing to _h2_ #
:balloon: Thanks for your help improving the project!
## Getting Help ##
If you have a question about the h2 library or have encountered problems using it, you may
[file an issue][issue] or ask a question on the [Tokio Discord][discord].
## Submitting a Pull Request ##
Do you have an improvement?
1. Submit an [issue][issue] describing your proposed change.
2. We will try to respond to your issue promptly.
3. Fork this repo, develop and test your code changes. See the project's [README](README.md) for further information about working in this repository.
4. Submit a pull request against this repo's `master` branch.
5. Your branch may be merged once all configured checks pass, including:
- Code review has been completed.
- The branch has passed tests in CI.
## Committing ##
When initially submitting a pull request, we prefer a single squashed commit. It
is preferable to split up contributions into multiple pull requests if the
changes are unrelated. All pull requests are squashed when merged, but
squashing yourself gives you better control over the commit message.
After the pull request is submitted, all changes should be done in separate
commits. This makes reviewing the evolution of the pull request easier. We will
squash all the changes into a single commit when we merge the pull request.
### Commit messages ###
Finalized commit messages should be in the following format:
```
Subject
Problem
Solution
Validation
```
#### Subject ####
- one line, <= 50 characters
- describe what is done; not the result
- use the active voice
- capitalize first word and proper nouns
- do not end in a period — this is a title/subject
- reference the github issue by number
##### Examples #####
```
bad: server disconnects should cause dst client disconnects.
good: Propagate disconnects from source to destination
```
```
bad: support tls servers
good: Introduce support for server-side TLS (#347)
```
#### Problem ####
Explain the context and why you're making that change. What is the problem
you're trying to solve? In some cases there is not a problem and this can be
thought of as being the motivation for your change.
#### Solution ####
Describe the modifications you've made.
#### Validation ####
Describe the testing you've done to validate your change. Performance-related
changes should include before- and after- benchmark results.
[issue]: https://github.com/hyperium/h2/issues/new
[discord]: https://discord.gg/tokio

View File

@@ -0,0 +1,80 @@
[package]
name = "h2"
# When releasing to crates.io:
# - Update CHANGELOG.md.
# - Create git tag
version = "0.4.11"
license = "MIT"
authors = [
"Carl Lerche <me@carllerche.com>",
"Sean McArthur <sean@seanmonstar.com>",
]
description = "An HTTP/2 client and server"
documentation = "https://docs.rs/h2"
repository = "https://github.com/hyperium/h2"
readme = "README.md"
keywords = ["http", "async", "non-blocking"]
categories = ["asynchronous", "web-programming", "network-programming"]
exclude = ["fixtures/**", "ci/**"]
edition = "2021"
rust-version = "1.63"
[features]
# Enables `futures::Stream` implementations for various types.
stream = []
# Enables **unstable** APIs. Any API exposed by this feature has no backwards
# compatibility guarantees. In other words, you should not use this feature for
# anything besides experimentation. Definitely **do not** publish a crate that
# depends on this feature.
unstable = []
[workspace]
members = [
"tests/h2-fuzz",
"tests/h2-tests",
"tests/h2-support",
"util/genfixture",
"util/genhuff",
]
[dependencies]
atomic-waker = "1.0.0"
futures-core = { version = "0.3", default-features = false }
futures-sink = { version = "0.3", default-features = false }
tokio-util = { version = "0.7.1", features = ["codec", "io"] }
tokio = { version = "1", features = ["io-util"] }
bytes = "1"
http = "1"
tracing = { version = "0.1.35", default-features = false, features = ["std"] }
fnv = "1.0.5"
slab = "0.4.2"
indexmap = { version = "2", features = ["std"] }
[dev-dependencies]
# Fuzzing
quickcheck = { version = "1.0.3", default-features = false }
rand = "0.8.4"
# HPACK fixtures
hex = "0.4.3"
walkdir = "2.3.2"
serde = "1.0.0"
serde_json = "1.0.0"
# Examples
tokio = { version = "1", features = ["rt-multi-thread", "macros", "sync", "net"] }
env_logger = { version = "0.10", default-features = false }
tokio-rustls = "0.26"
webpki-roots = "1"
[lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = ["cfg(fuzzing)"] }
[package.metadata.docs.rs]
features = ["stream"]
[[bench]]
name = "main"
harness = false

patch/h2-0.4.10/LICENSE Normal file
View File

@@ -0,0 +1,25 @@
Copyright (c) 2017 h2 authors
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

patch/h2-0.4.10/README.md Normal file
View File

@@ -0,0 +1,71 @@
# H2
A Tokio aware, HTTP/2 client & server implementation for Rust.
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
[![Crates.io](https://img.shields.io/crates/v/h2.svg)](https://crates.io/crates/h2)
[![Documentation](https://docs.rs/h2/badge.svg)][dox]
More information about this crate can be found in the [crate documentation][dox].
[dox]: https://docs.rs/h2
## Features
* Client and server HTTP/2 implementation.
* Implements the full HTTP/2 specification.
* Passes [h2spec](https://github.com/summerwind/h2spec).
* Focus on performance and correctness.
* Built on [Tokio](https://tokio.rs).
## Non goals
This crate is intended to only be an implementation of the HTTP/2
specification. It does not handle:
* Managing TCP connections
* HTTP 1.0 upgrade
* TLS
* Any feature not described by the HTTP/2 specification.
This crate is now used by [hyper](https://github.com/hyperium/hyper), which will provide all of these features.
## Usage
To use `h2`, first add this to your `Cargo.toml`:
```toml
[dependencies]
h2 = "0.4"
```
Next, add this to your crate:
```rust
extern crate h2;
use h2::server::Connection;
fn main() {
// ...
}
```
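For a slightly fuller picture, below is a minimal sketch of a server accept loop in the style of the vendored `examples/` and bench code; the bind address and response body are placeholders, and it assumes `tokio` (with the `rt-multi-thread`, `macros` and `net` features), `http` and `bytes` alongside `h2`:
```rust
use bytes::Bytes;
use h2::server;
use http::Response;
use tokio::net::TcpListener;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let listener = TcpListener::bind("127.0.0.1:5928").await?;
    loop {
        let (socket, _peer) = listener.accept().await?;
        tokio::spawn(async move {
            // HTTP/2 handshake, then accept request streams until the peer closes.
            let mut connection = server::handshake(socket).await?;
            while let Some(result) = connection.accept().await {
                let (_request, mut respond) = result?;
                // Send headers first (keeping the stream open), then the body.
                let mut send = respond.send_response(Response::new(()), false)?;
                send.send_data(Bytes::from_static(b"hello"), true)?;
            }
            Ok::<_, h2::Error>(())
        });
    }
}
```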
## FAQ
**How does h2 compare to [solicit] or [rust-http2]?**
The h2 library has implemented more of the details of the HTTP/2 specification
than any other Rust library. It also passes the [h2spec] set of tests. The h2
library is rapidly approaching "production ready" quality.
Besides the above, Solicit is built on blocking I/O and does not appear to be
actively maintained.
**Is this an embedded Java SQL database engine?**
[No](https://www.h2database.com).
[solicit]: https://github.com/mlalic/solicit
[rust-http2]: https://github.com/stepancheg/rust-http2
[h2spec]: https://github.com/summerwind/h2spec

View File

@@ -0,0 +1,148 @@
use bytes::Bytes;
use h2::{
client,
server::{self, SendResponse},
RecvStream,
};
use http::Request;
use std::{
error::Error,
time::{Duration, Instant},
};
use tokio::net::{TcpListener, TcpStream};
const NUM_REQUESTS_TO_SEND: usize = 100_000;
// The actual server.
async fn server(addr: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
let listener = TcpListener::bind(addr).await?;
loop {
if let Ok((socket, _peer_addr)) = listener.accept().await {
tokio::spawn(async move {
if let Err(e) = serve(socket).await {
println!(" -> err={:?}", e);
}
});
}
}
}
async fn serve(socket: TcpStream) -> Result<(), Box<dyn Error + Send + Sync>> {
let mut connection = server::handshake(socket).await?;
while let Some(result) = connection.accept().await {
let (request, respond) = result?;
tokio::spawn(async move {
if let Err(e) = handle_request(request, respond).await {
println!("error while handling request: {}", e);
}
});
}
Ok(())
}
async fn handle_request(
mut request: Request<RecvStream>,
mut respond: SendResponse<Bytes>,
) -> Result<(), Box<dyn Error + Send + Sync>> {
let body = request.body_mut();
while let Some(data) = body.data().await {
let data = data?;
let _ = body.flow_control().release_capacity(data.len());
}
let response = http::Response::new(());
let mut send = respond.send_response(response, false)?;
send.send_data(Bytes::from_static(b"pong"), true)?;
Ok(())
}
// The benchmark
async fn send_requests(addr: &str) -> Result<(), Box<dyn Error>> {
let tcp = loop {
let Ok(tcp) = TcpStream::connect(addr).await else {
continue;
};
break tcp;
};
let (client, h2) = client::handshake(tcp).await?;
// Spawn a task to run the conn...
tokio::spawn(async move {
if let Err(e) = h2.await {
println!("GOT ERR={:?}", e);
}
});
let mut handles = Vec::with_capacity(NUM_REQUESTS_TO_SEND);
for _i in 0..NUM_REQUESTS_TO_SEND {
let mut client = client.clone();
let task = tokio::spawn(async move {
let request = Request::builder().body(()).unwrap();
let instant = Instant::now();
let (response, _) = client.send_request(request, true).unwrap();
let response = response.await.unwrap();
let mut body = response.into_body();
while let Some(_chunk) = body.data().await {}
instant.elapsed()
});
handles.push(task);
}
let instant = Instant::now();
let mut result = Vec::with_capacity(NUM_REQUESTS_TO_SEND);
for handle in handles {
result.push(handle.await.unwrap());
}
let mut sum = Duration::new(0, 0);
for r in result.iter() {
sum = sum.checked_add(*r).unwrap();
}
println!("Overall: {}ms.", instant.elapsed().as_millis());
println!("Fastest: {}ms", result.iter().min().unwrap().as_millis());
println!("Slowest: {}ms", result.iter().max().unwrap().as_millis());
println!(
"Avg : {}ms",
sum.div_f64(NUM_REQUESTS_TO_SEND as f64).as_millis()
);
Ok(())
}
fn main() {
let _ = env_logger::try_init();
let addr = "127.0.0.1:5928";
println!("H2 running in current-thread runtime at {addr}:");
std::thread::spawn(|| {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
rt.block_on(server(addr)).unwrap();
});
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
rt.block_on(send_requests(addr)).unwrap();
let addr = "127.0.0.1:5929";
println!("H2 running in multi-thread runtime at {addr}:");
std::thread::spawn(|| {
let rt = tokio::runtime::Builder::new_multi_thread()
.worker_threads(4)
.enable_all()
.build()
.unwrap();
rt.block_on(server(addr)).unwrap();
});
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
.unwrap();
rt.block_on(send_requests(addr)).unwrap();
}

View File

@@ -0,0 +1,43 @@
#!/bin/bash
LOGFILE="/tmp/h2server.log"
override_h2spec=false
# Check for optional flag
while getopts "F" opt; do
case $opt in
F) override_h2spec=true ;;
*) echo "Usage: $0 [-o]"; exit 1 ;;
esac
done
if ! [ -e "/tmp/h2spec" ] || $override_h2spec ; then
# if we don't already have an h2spec executable, download it from GitHub with curl
if [[ "$OSTYPE" == "darwin"* ]]; then
curl -L -o /tmp/h2spec_darwin_amd64.tar.gz https://github.com/summerwind/h2spec/releases/download/v2.1.1/h2spec_darwin_amd64.tar.gz \
&& tar xf /tmp/h2spec_darwin_amd64.tar.gz -C /tmp
else
curl -L -o /tmp/h2spec_linux_amd64.tar.gz https://github.com/summerwind/h2spec/releases/download/v2.1.1/h2spec_linux_amd64.tar.gz \
&& tar xf /tmp/h2spec_linux_amd64.tar.gz -C /tmp
fi
fi
cargo build --example server
exec 3< <(./target/debug/examples/server);
SERVER_PID=$!
# wait 'til the server is listening before running h2spec, and pipe server's
# stdout to a log file.
sed '/listening on Ok(127.0.0.1:5928)/q' <&3 ; cat <&3 > "${LOGFILE}" &
# run h2spec against the server, printing the server log if h2spec failed
/tmp/h2spec -p 5928
H2SPEC_STATUS=$?
if [ "${H2SPEC_STATUS}" -eq 0 ]; then
echo "h2spec passed!"
else
echo "h2spec failed! server logs:"
cat "${LOGFILE}"
fi
kill "${SERVER_PID}"
exit "${H2SPEC_STATUS}"

View File

@@ -0,0 +1,72 @@
use h2::client;
use http::{Method, Request};
use tokio::net::TcpStream;
use tokio_rustls::TlsConnector;
use tokio_rustls::rustls::{pki_types::ServerName, RootCertStore};
use std::error::Error;
use std::net::ToSocketAddrs;
const ALPN_H2: &str = "h2";
#[tokio::main]
pub async fn main() -> Result<(), Box<dyn Error>> {
let _ = env_logger::try_init();
let tls_client_config = std::sync::Arc::new({
let root_store = RootCertStore::from_iter(webpki_roots::TLS_SERVER_ROOTS.iter().cloned());
let mut c = tokio_rustls::rustls::ClientConfig::builder()
.with_root_certificates(root_store)
.with_no_client_auth();
c.alpn_protocols.push(ALPN_H2.as_bytes().to_owned());
c
});
// Sync DNS resolution.
let addr = "http2.akamai.com:443"
.to_socket_addrs()
.unwrap()
.next()
.unwrap();
println!("ADDR: {:?}", addr);
let tcp = TcpStream::connect(&addr).await?;
let dns_name = ServerName::try_from("http2.akamai.com").unwrap();
let connector = TlsConnector::from(tls_client_config);
let res = connector.connect(dns_name, tcp).await;
let tls = res.unwrap();
{
let (_, session) = tls.get_ref();
let negotiated_protocol = session.alpn_protocol();
assert_eq!(Some(ALPN_H2.as_bytes()), negotiated_protocol);
}
println!("Starting client handshake");
let (mut client, h2) = client::handshake(tls).await?;
println!("building request");
let request = Request::builder()
.method(Method::GET)
.uri("https://http2.akamai.com/")
.body(())
.unwrap();
println!("sending request");
let (response, other) = client.send_request(request, true).unwrap();
tokio::spawn(async move {
if let Err(e) = h2.await {
println!("GOT ERR={:?}", e);
}
});
println!("waiting on response : {:?}", other);
let (_, mut body) = response.await?.into_parts();
println!("processing body");
while let Some(chunk) = body.data().await {
println!("RX: {:?}", chunk?);
}
Ok(())
}

View File

@@ -0,0 +1,52 @@
use h2::client;
use http::{HeaderMap, Request};
use std::error::Error;
use tokio::net::TcpStream;
#[tokio::main]
pub async fn main() -> Result<(), Box<dyn Error>> {
let _ = env_logger::try_init();
let tcp = TcpStream::connect("127.0.0.1:5928").await?;
let (mut client, h2) = client::handshake(tcp).await?;
println!("sending request");
let request = Request::builder()
.uri("https://http2.akamai.com/")
.body(())
.unwrap();
let mut trailers = HeaderMap::new();
trailers.insert("zomg", "hello".parse().unwrap());
let (response, mut stream) = client.send_request(request, false).unwrap();
// send trailers
stream.send_trailers(trailers).unwrap();
// Spawn a task to run the conn...
tokio::spawn(async move {
if let Err(e) = h2.await {
println!("GOT ERR={:?}", e);
}
});
let response = response.await?;
println!("GOT RESPONSE: {:?}", response);
// Get the body
let mut body = response.into_body();
while let Some(chunk) = body.data().await {
println!("GOT CHUNK = {:?}", chunk?);
}
if let Some(trailers) = body.trailers().await? {
println!("GOT TRAILERS: {:?}", trailers);
}
Ok(())
}

View File

@@ -0,0 +1,65 @@
use std::error::Error;
use bytes::Bytes;
use h2::server::{self, SendResponse};
use h2::RecvStream;
use http::Request;
use tokio::net::{TcpListener, TcpStream};
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
let _ = env_logger::try_init();
let listener = TcpListener::bind("127.0.0.1:5928").await?;
println!("listening on {:?}", listener.local_addr());
loop {
if let Ok((socket, _peer_addr)) = listener.accept().await {
tokio::spawn(async move {
if let Err(e) = serve(socket).await {
println!(" -> err={:?}", e);
}
});
}
}
}
async fn serve(socket: TcpStream) -> Result<(), Box<dyn Error + Send + Sync>> {
let mut connection = server::handshake(socket).await?;
println!("H2 connection bound");
while let Some(result) = connection.accept().await {
let (request, respond) = result?;
tokio::spawn(async move {
if let Err(e) = handle_request(request, respond).await {
println!("error while handling request: {}", e);
}
});
}
println!("~~~~~~~~~~~ H2 connection CLOSE !!!!!! ~~~~~~~~~~~");
Ok(())
}
async fn handle_request(
mut request: Request<RecvStream>,
mut respond: SendResponse<Bytes>,
) -> Result<(), Box<dyn Error + Send + Sync>> {
println!("GOT request: {:?}", request);
let body = request.body_mut();
while let Some(data) = body.data().await {
let data = data?;
println!("<<<< recv {:?}", data);
let _ = body.flow_control().release_capacity(data.len());
}
let response = http::Response::new(());
let mut send = respond.send_response(response, false)?;
println!(">>>> send");
send.send_data(Bytes::from_static(b"hello "), false)?;
send.send_data(Bytes::from_static(b"world\n"), true)?;
Ok(())
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,102 @@
use crate::proto::Error;
use std::{error, fmt, io};
/// Errors caused by sending a message
#[derive(Debug)]
pub enum SendError {
Connection(Error),
User(UserError),
}
/// Errors caused by users of the library
#[derive(Debug)]
pub enum UserError {
/// The stream ID is no longer accepting frames.
InactiveStreamId,
/// The stream is not currently expecting a frame of this type.
UnexpectedFrameType,
/// The payload size is too big
PayloadTooBig,
/// The application attempted to initiate too many streams to remote.
Rejected,
/// The released capacity is larger than claimed capacity.
ReleaseCapacityTooBig,
/// The stream ID space is overflowed.
///
/// A new connection is needed.
OverflowedStreamId,
/// Illegal headers, such as connection-specific headers.
MalformedHeaders,
/// Request submitted with relative URI.
MissingUriSchemeAndAuthority,
/// Calls `SendResponse::poll_reset` after having called `send_response`.
PollResetAfterSendResponse,
/// Calls `PingPong::send_ping` before receiving a pong.
SendPingWhilePending,
/// Tries to update local SETTINGS while ACK has not been received.
SendSettingsWhilePending,
/// Tries to send push promise to peer who has disabled server push
PeerDisabledServerPush,
}
// ===== impl SendError =====
impl error::Error for SendError {}
impl fmt::Display for SendError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
Self::Connection(ref e) => e.fmt(fmt),
Self::User(ref e) => e.fmt(fmt),
}
}
}
impl From<io::Error> for SendError {
fn from(src: io::Error) -> Self {
Self::Connection(src.into())
}
}
impl From<UserError> for SendError {
fn from(src: UserError) -> Self {
SendError::User(src)
}
}
// ===== impl UserError =====
impl error::Error for UserError {}
impl fmt::Display for UserError {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
use self::UserError::*;
fmt.write_str(match *self {
InactiveStreamId => "inactive stream",
UnexpectedFrameType => "unexpected frame type",
PayloadTooBig => "payload too big",
Rejected => "rejected",
ReleaseCapacityTooBig => "release capacity too big",
OverflowedStreamId => "stream ID overflowed",
MalformedHeaders => "malformed headers",
MissingUriSchemeAndAuthority => "request URI missing scheme and authority",
PollResetAfterSendResponse => "poll_reset after send_response is illegal",
SendPingWhilePending => "send_ping before received previous pong",
SendSettingsWhilePending => "sending SETTINGS before received previous ACK",
PeerDisabledServerPush => "sending PUSH_PROMISE to peer who disabled server push",
})
}
}
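
A minimal, crate-internal sketch (test-style; not part of this file) of how these types compose: user mistakes surface as `UserError`, get wrapped into `SendError` via the `From` impl above, and render through the `Display` impls.

#[cfg(test)]
mod conversion_sketch {
    use super::{SendError, UserError};
    #[test]
    fn user_error_wraps_into_send_error() {
        let err: SendError = UserError::PayloadTooBig.into();
        match err {
            SendError::User(e) => assert_eq!(e.to_string(), "payload too big"),
            SendError::Connection(_) => unreachable!("no connection error involved"),
        }
    }
}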

View File

@@ -0,0 +1,465 @@
use crate::frame::{self, Frame, Kind, Reason};
use crate::frame::{
DEFAULT_MAX_FRAME_SIZE, DEFAULT_SETTINGS_HEADER_TABLE_SIZE, MAX_MAX_FRAME_SIZE,
};
use crate::proto::Error;
use crate::hpack;
use futures_core::Stream;
use bytes::{Buf, BytesMut};
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::AsyncRead;
use tokio_util::codec::FramedRead as InnerFramedRead;
use tokio_util::codec::{LengthDelimitedCodec, LengthDelimitedCodecError};
// 16 MB "sane default" taken from golang http2
const DEFAULT_SETTINGS_MAX_HEADER_LIST_SIZE: usize = 16 << 20;
#[derive(Debug)]
pub struct FramedRead<T> {
inner: InnerFramedRead<T, LengthDelimitedCodec>,
// hpack decoder state
hpack: hpack::Decoder,
max_header_list_size: usize,
max_continuation_frames: usize,
partial: Option<Partial>,
}
/// Partially loaded headers frame
#[derive(Debug)]
struct Partial {
/// Empty frame
frame: Continuable,
/// Partial header payload
buf: BytesMut,
continuation_frames_count: usize,
}
#[derive(Debug)]
enum Continuable {
Headers(frame::Headers),
PushPromise(frame::PushPromise),
}
impl<T> FramedRead<T> {
pub fn new(inner: InnerFramedRead<T, LengthDelimitedCodec>) -> FramedRead<T> {
let max_header_list_size = DEFAULT_SETTINGS_MAX_HEADER_LIST_SIZE;
let max_continuation_frames =
calc_max_continuation_frames(max_header_list_size, inner.decoder().max_frame_length());
FramedRead {
inner,
hpack: hpack::Decoder::new(DEFAULT_SETTINGS_HEADER_TABLE_SIZE),
max_header_list_size,
max_continuation_frames,
partial: None,
}
}
pub fn get_ref(&self) -> &T {
self.inner.get_ref()
}
pub fn get_mut(&mut self) -> &mut T {
self.inner.get_mut()
}
/// Returns the current max frame size setting
#[inline]
pub fn max_frame_size(&self) -> usize {
self.inner.decoder().max_frame_length()
}
/// Updates the max frame size setting.
///
/// Must be within 16,384 and 16,777,215.
#[inline]
pub fn set_max_frame_size(&mut self, val: usize) {
assert!(DEFAULT_MAX_FRAME_SIZE as usize <= val && val <= MAX_MAX_FRAME_SIZE as usize);
self.inner.decoder_mut().set_max_frame_length(val);
// Update max CONTINUATION frames too, since its based on this
self.max_continuation_frames = calc_max_continuation_frames(self.max_header_list_size, val);
}
/// Update the max header list size setting.
#[inline]
pub fn set_max_header_list_size(&mut self, val: usize) {
self.max_header_list_size = val;
// Update max CONTINUATION frames too, since its based on this
self.max_continuation_frames = calc_max_continuation_frames(val, self.max_frame_size());
}
/// Update the header table size setting.
#[inline]
pub fn set_header_table_size(&mut self, val: usize) {
self.hpack.queue_size_update(val);
}
}
fn calc_max_continuation_frames(header_max: usize, frame_max: usize) -> usize {
// At least this many frames needed to use max header list size
let min_frames_for_list = (header_max / frame_max).max(1);
// Some padding for imperfectly packed frames
// 25% without floats
let padding = min_frames_for_list >> 2;
min_frames_for_list.saturating_add(padding).max(5)
}
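// Worked example of the calculation above with the defaults: a 16 MiB max
// header list (DEFAULT_SETTINGS_MAX_HEADER_LIST_SIZE) and a 16 KiB max frame
// size give min_frames_for_list = 16_777_216 / 16_384 = 1024 and
// padding = 1024 >> 2 = 256, so up to 1280 non-terminal CONTINUATION frames
// are tolerated before decode_frame tears the connection down with
// ENHANCE_YOUR_CALM.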
/// Decodes a frame.
///
/// This method is intentionally de-generified and outlined because it is very large.
fn decode_frame(
hpack: &mut hpack::Decoder,
max_header_list_size: usize,
max_continuation_frames: usize,
partial_inout: &mut Option<Partial>,
mut bytes: BytesMut,
) -> Result<Option<Frame>, Error> {
let span = tracing::trace_span!("FramedRead::decode_frame", offset = bytes.len());
let _e = span.enter();
tracing::trace!("decoding frame from {}B", bytes.len());
// Parse the head
let head = frame::Head::parse(&bytes);
if partial_inout.is_some() && head.kind() != Kind::Continuation {
proto_err!(conn: "expected CONTINUATION, got {:?}", head.kind());
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
let kind = head.kind();
tracing::trace!(frame.kind = ?kind);
macro_rules! header_block {
($frame:ident, $head:ident, $bytes:ident) => ({
// Drop the frame header
$bytes.advance(frame::HEADER_LEN);
// Parse the header frame w/o parsing the payload
let (mut frame, mut payload) = match frame::$frame::load($head, $bytes) {
Ok(res) => res,
Err(frame::Error::InvalidDependencyId) => {
proto_err!(stream: "invalid HEADERS dependency ID");
// A stream cannot depend on itself. An endpoint MUST
// treat this as a stream error (Section 5.4.2) of type
// `PROTOCOL_ERROR`.
return Err(Error::library_reset($head.stream_id(), Reason::PROTOCOL_ERROR));
},
Err(e) => {
proto_err!(conn: "failed to load frame; err={:?}", e);
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
};
let is_end_headers = frame.is_end_headers();
// Load the HPACK encoded headers
match frame.load_hpack(&mut payload, max_header_list_size, hpack) {
Ok(_) => {},
Err(frame::Error::Hpack(hpack::DecoderError::NeedMore(_))) if !is_end_headers => {},
Err(frame::Error::MalformedMessage) => {
let id = $head.stream_id();
proto_err!(stream: "malformed header block; stream={:?}", id);
return Err(Error::library_reset(id, Reason::PROTOCOL_ERROR));
},
Err(e) => {
proto_err!(conn: "failed HPACK decoding; err={:?}", e);
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
}
if is_end_headers {
frame.into()
} else {
tracing::trace!("loaded partial header block");
// Defer returning the frame
*partial_inout = Some(Partial {
frame: Continuable::$frame(frame),
buf: payload,
continuation_frames_count: 0,
});
return Ok(None);
}
});
}
let frame = match kind {
Kind::Settings => {
let res = frame::Settings::load(head, &bytes[frame::HEADER_LEN..]);
res.map_err(|e| {
proto_err!(conn: "failed to load SETTINGS frame; err={:?}", e);
Error::library_go_away(Reason::PROTOCOL_ERROR)
})?
.into()
}
Kind::Ping => {
let res = frame::Ping::load(head, &bytes[frame::HEADER_LEN..]);
res.map_err(|e| {
proto_err!(conn: "failed to load PING frame; err={:?}", e);
Error::library_go_away(Reason::PROTOCOL_ERROR)
})?
.into()
}
Kind::WindowUpdate => {
let res = frame::WindowUpdate::load(head, &bytes[frame::HEADER_LEN..]);
res.map_err(|e| {
proto_err!(conn: "failed to load WINDOW_UPDATE frame; err={:?}", e);
Error::library_go_away(Reason::PROTOCOL_ERROR)
})?
.into()
}
Kind::Data => {
bytes.advance(frame::HEADER_LEN);
let res = frame::Data::load(head, bytes.freeze());
// TODO: Should this always be connection level? Probably not...
res.map_err(|e| {
proto_err!(conn: "failed to load DATA frame; err={:?}", e);
Error::library_go_away(Reason::PROTOCOL_ERROR)
})?
.into()
}
Kind::Headers => header_block!(Headers, head, bytes),
Kind::Reset => {
let res = frame::Reset::load(head, &bytes[frame::HEADER_LEN..]);
res.map_err(|e| {
proto_err!(conn: "failed to load RESET frame; err={:?}", e);
Error::library_go_away(Reason::PROTOCOL_ERROR)
})?
.into()
}
Kind::GoAway => {
let res = frame::GoAway::load(&bytes[frame::HEADER_LEN..]);
res.map_err(|e| {
proto_err!(conn: "failed to load GO_AWAY frame; err={:?}", e);
Error::library_go_away(Reason::PROTOCOL_ERROR)
})?
.into()
}
Kind::PushPromise => header_block!(PushPromise, head, bytes),
Kind::Priority => {
if head.stream_id() == 0 {
// Invalid stream identifier
proto_err!(conn: "invalid stream ID 0");
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
match frame::Priority::load(head, &bytes[frame::HEADER_LEN..]) {
Ok(frame) => frame.into(),
Err(frame::Error::InvalidDependencyId) => {
// A stream cannot depend on itself. An endpoint MUST
// treat this as a stream error (Section 5.4.2) of type
// `PROTOCOL_ERROR`.
let id = head.stream_id();
proto_err!(stream: "PRIORITY invalid dependency ID; stream={:?}", id);
return Err(Error::library_reset(id, Reason::PROTOCOL_ERROR));
}
Err(e) => {
proto_err!(conn: "failed to load PRIORITY frame; err={:?};", e);
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
}
}
Kind::Continuation => {
let is_end_headers = (head.flag() & 0x4) == 0x4;
let mut partial = match partial_inout.take() {
Some(partial) => partial,
None => {
proto_err!(conn: "received unexpected CONTINUATION frame");
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
};
// The stream identifiers must match
if partial.frame.stream_id() != head.stream_id() {
proto_err!(conn: "CONTINUATION frame stream ID does not match previous frame stream ID");
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
// Check for CONTINUATION flood
if is_end_headers {
partial.continuation_frames_count = 0;
} else {
let cnt = partial.continuation_frames_count + 1;
if cnt > max_continuation_frames {
tracing::debug!("too_many_continuations, max = {}", max_continuation_frames);
return Err(Error::library_go_away_data(
Reason::ENHANCE_YOUR_CALM,
"too_many_continuations",
));
} else {
partial.continuation_frames_count = cnt;
}
}
// Extend the buf
if partial.buf.is_empty() {
partial.buf = bytes.split_off(frame::HEADER_LEN);
} else {
if partial.frame.is_over_size() {
// If there was left over bytes previously, they may be
// needed to continue decoding, even though we will
// be ignoring this frame. This is done to keep the HPACK
// decoder state up-to-date.
//
// Still, we need to be careful, because if a malicious
// attacker were to try to send a gigantic string, such
// that it fits over multiple header blocks, we could
// grow memory uncontrollably again, and that'd be a shame.
//
// Instead, we use a simple heuristic to determine if
// we should continue to ignore decoding, or to tell
// the attacker to go away.
if partial.buf.len() + bytes.len() > max_header_list_size {
proto_err!(conn: "CONTINUATION frame header block size over ignorable limit");
return Err(Error::library_go_away(Reason::COMPRESSION_ERROR));
}
}
partial.buf.extend_from_slice(&bytes[frame::HEADER_LEN..]);
}
match partial
.frame
.load_hpack(&mut partial.buf, max_header_list_size, hpack)
{
Ok(_) => {}
Err(frame::Error::Hpack(hpack::DecoderError::NeedMore(_))) if !is_end_headers => {}
Err(frame::Error::MalformedMessage) => {
let id = head.stream_id();
proto_err!(stream: "malformed CONTINUATION frame; stream={:?}", id);
return Err(Error::library_reset(id, Reason::PROTOCOL_ERROR));
}
Err(e) => {
proto_err!(conn: "failed HPACK decoding; err={:?}", e);
return Err(Error::library_go_away(Reason::PROTOCOL_ERROR));
}
}
if is_end_headers {
partial.frame.into()
} else {
*partial_inout = Some(partial);
return Ok(None);
}
}
Kind::Unknown => {
// Unknown frames are ignored
return Ok(None);
}
};
Ok(Some(frame))
}
impl<T> Stream for FramedRead<T>
where
T: AsyncRead + Unpin,
{
type Item = Result<Frame, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let span = tracing::trace_span!("FramedRead::poll_next");
let _e = span.enter();
loop {
tracing::trace!("poll");
let bytes = match ready!(Pin::new(&mut self.inner).poll_next(cx)) {
Some(Ok(bytes)) => bytes,
Some(Err(e)) => return Poll::Ready(Some(Err(map_err(e)))),
None => return Poll::Ready(None),
};
tracing::trace!(read.bytes = bytes.len());
let Self {
ref mut hpack,
max_header_list_size,
ref mut partial,
max_continuation_frames,
..
} = *self;
if let Some(frame) = decode_frame(
hpack,
max_header_list_size,
max_continuation_frames,
partial,
bytes,
)? {
tracing::debug!(?frame, "received");
return Poll::Ready(Some(Ok(frame)));
}
}
}
}
fn map_err(err: io::Error) -> Error {
if let io::ErrorKind::InvalidData = err.kind() {
if let Some(custom) = err.get_ref() {
if custom.is::<LengthDelimitedCodecError>() {
return Error::library_go_away(Reason::FRAME_SIZE_ERROR);
}
}
}
err.into()
}
// ===== impl Continuable =====
impl Continuable {
fn stream_id(&self) -> frame::StreamId {
match *self {
Continuable::Headers(ref h) => h.stream_id(),
Continuable::PushPromise(ref p) => p.stream_id(),
}
}
fn is_over_size(&self) -> bool {
match *self {
Continuable::Headers(ref h) => h.is_over_size(),
Continuable::PushPromise(ref p) => p.is_over_size(),
}
}
fn load_hpack(
&mut self,
src: &mut BytesMut,
max_header_list_size: usize,
decoder: &mut hpack::Decoder,
) -> Result<(), frame::Error> {
match *self {
Continuable::Headers(ref mut h) => h.load_hpack(src, max_header_list_size, decoder),
Continuable::PushPromise(ref mut p) => p.load_hpack(src, max_header_list_size, decoder),
}
}
}
impl<T> From<Continuable> for Frame<T> {
fn from(cont: Continuable) -> Self {
match cont {
Continuable::Headers(mut headers) => {
headers.set_end_headers();
headers.into()
}
Continuable::PushPromise(mut push) => {
push.set_end_headers();
push.into()
}
}
}
}

View File

@@ -0,0 +1,373 @@
use crate::codec::UserError;
use crate::codec::UserError::*;
use crate::frame::{self, Frame, FrameSize};
use crate::hpack;
use bytes::{Buf, BufMut, BytesMut};
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, AsyncWrite, ReadBuf};
use tokio_util::io::poll_write_buf;
use std::io::{self, Cursor};
// A macro to get around a method needing to borrow &mut self
macro_rules! limited_write_buf {
($self:expr) => {{
let limit = $self.max_frame_size() + frame::HEADER_LEN;
$self.buf.get_mut().limit(limit)
}};
}
#[derive(Debug)]
pub struct FramedWrite<T, B> {
/// Upstream `AsyncWrite`
inner: T,
final_flush_done: bool,
encoder: Encoder<B>,
}
#[derive(Debug)]
struct Encoder<B> {
/// HPACK encoder
hpack: hpack::Encoder,
/// Write buffer
///
/// TODO: Should this be a ring buffer?
buf: Cursor<BytesMut>,
/// Next frame to encode
next: Option<Next<B>>,
/// Last data frame
last_data_frame: Option<frame::Data<B>>,
/// Max frame size, this is specified by the peer
max_frame_size: FrameSize,
/// Chain payloads bigger than this.
chain_threshold: usize,
/// Min buffer required to attempt to write a frame
min_buffer_capacity: usize,
}
#[derive(Debug)]
enum Next<B> {
Data(frame::Data<B>),
Continuation(frame::Continuation),
}
/// Initialize the connection with this amount of write buffer.
///
/// The minimum MAX_FRAME_SIZE is 16kb, so always be able to send a HEADERS
/// frame that big.
const DEFAULT_BUFFER_CAPACITY: usize = 16 * 1_024;
/// Chain payloads bigger than this when vectored I/O is enabled. The remote
/// will never advertise a max frame size less than this (well, the spec says
/// the max frame size can't be less than 16kb, so not even close).
const CHAIN_THRESHOLD: usize = 256;
/// Chain payloads bigger than this when vectored I/O is **not** enabled.
/// A larger value in this scenario will reduce the number of small and
/// fragmented data being sent, and thereby improve the throughput.
const CHAIN_THRESHOLD_WITHOUT_VECTORED_IO: usize = 1024;
// TODO: Make generic
impl<T, B> FramedWrite<T, B>
where
T: AsyncWrite + Unpin,
B: Buf,
{
pub fn new(inner: T) -> FramedWrite<T, B> {
let chain_threshold = if inner.is_write_vectored() {
CHAIN_THRESHOLD
} else {
CHAIN_THRESHOLD_WITHOUT_VECTORED_IO
};
FramedWrite {
inner,
final_flush_done: false,
encoder: Encoder {
hpack: hpack::Encoder::default(),
buf: Cursor::new(BytesMut::with_capacity(DEFAULT_BUFFER_CAPACITY)),
next: None,
last_data_frame: None,
max_frame_size: frame::DEFAULT_MAX_FRAME_SIZE,
chain_threshold,
min_buffer_capacity: chain_threshold + frame::HEADER_LEN,
},
}
}
/// Returns `Ready` when `send` is able to accept a frame
///
/// Calling this function may result in the current contents of the buffer
/// to be flushed to `T`.
pub fn poll_ready(&mut self, cx: &mut Context) -> Poll<io::Result<()>> {
if !self.encoder.has_capacity() {
// Try flushing
ready!(self.flush(cx))?;
if !self.encoder.has_capacity() {
return Poll::Pending;
}
}
Poll::Ready(Ok(()))
}
/// Buffer a frame.
///
/// `poll_ready` must be called first to ensure that a frame may be
/// accepted.
pub fn buffer(&mut self, item: Frame<B>) -> Result<(), UserError> {
self.encoder.buffer(item)
}
/// Flush buffered data to the wire
pub fn flush(&mut self, cx: &mut Context) -> Poll<io::Result<()>> {
let span = tracing::trace_span!("FramedWrite::flush");
let _e = span.enter();
loop {
while !self.encoder.is_empty() {
let n = match self.encoder.next {
Some(Next::Data(ref mut frame)) => {
tracing::trace!(queued_data_frame = true);
let mut buf = (&mut self.encoder.buf).chain(frame.payload_mut());
ready!(poll_write_buf(Pin::new(&mut self.inner), cx, &mut buf))?
}
_ => {
tracing::trace!(queued_data_frame = false);
ready!(poll_write_buf(
Pin::new(&mut self.inner),
cx,
&mut self.encoder.buf
))?
}
};
if n == 0 {
return Poll::Ready(Err(io::Error::new(
io::ErrorKind::WriteZero,
"failed to write frame to socket",
)));
}
}
match self.encoder.unset_frame() {
ControlFlow::Continue => (),
ControlFlow::Break => break,
}
}
tracing::trace!("flushing buffer");
// Flush the upstream
ready!(Pin::new(&mut self.inner).poll_flush(cx))?;
Poll::Ready(Ok(()))
}
/// Close the codec
pub fn shutdown(&mut self, cx: &mut Context) -> Poll<io::Result<()>> {
if !self.final_flush_done {
ready!(self.flush(cx))?;
self.final_flush_done = true;
}
Pin::new(&mut self.inner).poll_shutdown(cx)
}
}
#[must_use]
enum ControlFlow {
Continue,
Break,
}
impl<B> Encoder<B>
where
B: Buf,
{
fn unset_frame(&mut self) -> ControlFlow {
// Clear internal buffer
self.buf.set_position(0);
self.buf.get_mut().clear();
// The data frame has been written, so unset it
match self.next.take() {
Some(Next::Data(frame)) => {
self.last_data_frame = Some(frame);
debug_assert!(self.is_empty());
ControlFlow::Break
}
Some(Next::Continuation(frame)) => {
// Buffer the continuation frame, then try to write again
let mut buf = limited_write_buf!(self);
if let Some(continuation) = frame.encode(&mut buf) {
self.next = Some(Next::Continuation(continuation));
}
ControlFlow::Continue
}
None => ControlFlow::Break,
}
}
fn buffer(&mut self, item: Frame<B>) -> Result<(), UserError> {
// Ensure that we have enough capacity to accept the write.
assert!(self.has_capacity());
let span = tracing::trace_span!("FramedWrite::buffer", frame = ?item);
let _e = span.enter();
tracing::debug!(frame = ?item, "send");
match item {
Frame::Data(mut v) => {
// Ensure that the payload is not greater than the max frame.
let len = v.payload().remaining();
if len > self.max_frame_size() {
return Err(PayloadTooBig);
}
if len >= self.chain_threshold {
let head = v.head();
// Encode the frame head to the buffer
head.encode(len, self.buf.get_mut());
if self.buf.get_ref().remaining() < self.chain_threshold {
let extra_bytes = self.chain_threshold - self.buf.remaining();
self.buf.get_mut().put(v.payload_mut().take(extra_bytes));
}
// Save the data frame
self.next = Some(Next::Data(v));
} else {
v.encode_chunk(self.buf.get_mut());
// The chunk has been fully encoded, so there is no need to
// keep it around
assert_eq!(v.payload().remaining(), 0, "chunk not fully encoded");
// Save off the last frame...
self.last_data_frame = Some(v);
}
}
Frame::Headers(v) => {
let mut buf = limited_write_buf!(self);
if let Some(continuation) = v.encode(&mut self.hpack, &mut buf) {
self.next = Some(Next::Continuation(continuation));
}
}
Frame::PushPromise(v) => {
let mut buf = limited_write_buf!(self);
if let Some(continuation) = v.encode(&mut self.hpack, &mut buf) {
self.next = Some(Next::Continuation(continuation));
}
}
Frame::Settings(v) => {
v.encode(self.buf.get_mut());
tracing::trace!(rem = self.buf.remaining(), "encoded settings");
}
Frame::GoAway(v) => {
v.encode(self.buf.get_mut());
tracing::trace!(rem = self.buf.remaining(), "encoded go_away");
}
Frame::Ping(v) => {
v.encode(self.buf.get_mut());
tracing::trace!(rem = self.buf.remaining(), "encoded ping");
}
Frame::WindowUpdate(v) => {
v.encode(self.buf.get_mut());
tracing::trace!(rem = self.buf.remaining(), "encoded window_update");
}
Frame::Priority(_) => {
/*
v.encode(self.buf.get_mut());
tracing::trace!("encoded priority; rem={:?}", self.buf.remaining());
*/
unimplemented!();
}
Frame::Reset(v) => {
v.encode(self.buf.get_mut());
tracing::trace!(rem = self.buf.remaining(), "encoded reset");
}
}
Ok(())
}
fn has_capacity(&self) -> bool {
self.next.is_none()
&& (self.buf.get_ref().capacity() - self.buf.get_ref().len()
>= self.min_buffer_capacity)
}
fn is_empty(&self) -> bool {
match self.next {
Some(Next::Data(ref frame)) => !frame.payload().has_remaining(),
_ => !self.buf.has_remaining(),
}
}
}
impl<B> Encoder<B> {
fn max_frame_size(&self) -> usize {
self.max_frame_size as usize
}
}
impl<T, B> FramedWrite<T, B> {
/// Returns the max frame size that can be sent
pub fn max_frame_size(&self) -> usize {
self.encoder.max_frame_size()
}
/// Set the peer's max frame size.
pub fn set_max_frame_size(&mut self, val: usize) {
assert!(val <= frame::MAX_MAX_FRAME_SIZE as usize);
self.encoder.max_frame_size = val as FrameSize;
}
/// Set the peer's header table size.
pub fn set_header_table_size(&mut self, val: usize) {
self.encoder.hpack.update_max_size(val);
}
/// Retrieve the last data frame that has been sent
pub fn take_last_data_frame(&mut self) -> Option<frame::Data<B>> {
self.encoder.last_data_frame.take()
}
pub fn get_mut(&mut self) -> &mut T {
&mut self.inner
}
}
impl<T: AsyncRead + Unpin, B> AsyncRead for FramedWrite<T, B> {
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf,
) -> Poll<io::Result<()>> {
Pin::new(&mut self.inner).poll_read(cx, buf)
}
}
// We never project the Pin to `B`.
impl<T: Unpin, B> Unpin for FramedWrite<T, B> {}
#[cfg(feature = "unstable")]
mod unstable {
use super::*;
impl<T, B> FramedWrite<T, B> {
pub fn get_ref(&self) -> &T {
&self.inner
}
}
}

View File

@@ -0,0 +1,206 @@
mod error;
mod framed_read;
mod framed_write;
pub use self::error::{SendError, UserError};
use self::framed_read::FramedRead;
use self::framed_write::FramedWrite;
use crate::frame::{self, Data, Frame};
use crate::proto::Error;
use bytes::Buf;
use futures_core::Stream;
use futures_sink::Sink;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, AsyncWrite};
use tokio_util::codec::length_delimited;
use std::io;
#[derive(Debug)]
pub struct Codec<T, B> {
inner: FramedRead<FramedWrite<T, B>>,
}
impl<T, B> Codec<T, B>
where
T: AsyncRead + AsyncWrite + Unpin,
B: Buf,
{
/// Returns a new `Codec` with the default max frame size
#[inline]
pub fn new(io: T) -> Self {
Self::with_max_recv_frame_size(io, frame::DEFAULT_MAX_FRAME_SIZE as usize)
}
/// Returns a new `Codec` with the given maximum frame size
pub fn with_max_recv_frame_size(io: T, max_frame_size: usize) -> Self {
// Wrap with writer
let framed_write = FramedWrite::new(io);
// Delimit the frames
let delimited = length_delimited::Builder::new()
.big_endian()
.length_field_length(3)
.length_adjustment(9)
.num_skip(0) // Don't skip the header
.new_read(framed_write);
let mut inner = FramedRead::new(delimited);
// Use FramedRead's method since it checks the value is within range.
inner.set_max_frame_size(max_frame_size);
Codec { inner }
}
}
impl<T, B> Codec<T, B> {
/// Updates the max received frame size.
///
/// The change takes effect the next time a frame is decoded. In other
/// words, if a frame is currently in process of being decoded with a frame
/// size greater than `val` but less than the max frame size in effect
/// before calling this function, then the frame will be allowed.
#[inline]
pub fn set_max_recv_frame_size(&mut self, val: usize) {
self.inner.set_max_frame_size(val)
}
/// Returns the current max received frame size setting.
///
/// This is the largest size this codec will accept from the wire. Larger
/// frames will be rejected.
#[cfg(feature = "unstable")]
#[inline]
pub fn max_recv_frame_size(&self) -> usize {
self.inner.max_frame_size()
}
/// Returns the max frame size that can be sent to the peer.
pub fn max_send_frame_size(&self) -> usize {
self.inner.get_ref().max_frame_size()
}
/// Set the peer's max frame size.
pub fn set_max_send_frame_size(&mut self, val: usize) {
self.framed_write().set_max_frame_size(val)
}
/// Set the peer's header table size.
pub fn set_send_header_table_size(&mut self, val: usize) {
self.framed_write().set_header_table_size(val)
}
/// Set the decoder's header table size.
pub fn set_recv_header_table_size(&mut self, val: usize) {
self.inner.set_header_table_size(val)
}
/// Set the max header list size that can be received.
pub fn set_max_recv_header_list_size(&mut self, val: usize) {
self.inner.set_max_header_list_size(val);
}
/// Get a reference to the inner stream.
#[cfg(feature = "unstable")]
pub fn get_ref(&self) -> &T {
self.inner.get_ref().get_ref()
}
/// Get a mutable reference to the inner stream.
pub fn get_mut(&mut self) -> &mut T {
self.inner.get_mut().get_mut()
}
/// Takes the data payload value that was fully written to the socket
pub(crate) fn take_last_data_frame(&mut self) -> Option<Data<B>> {
self.framed_write().take_last_data_frame()
}
fn framed_write(&mut self) -> &mut FramedWrite<T, B> {
self.inner.get_mut()
}
}
impl<T, B> Codec<T, B>
where
T: AsyncWrite + Unpin,
B: Buf,
{
/// Returns `Ready` when the codec can buffer a frame
pub fn poll_ready(&mut self, cx: &mut Context) -> Poll<io::Result<()>> {
self.framed_write().poll_ready(cx)
}
/// Buffer a frame.
///
/// `poll_ready` must be called first to ensure that a frame may be
/// accepted.
///
/// TODO: Rename this to avoid conflicts with Sink::buffer
pub fn buffer(&mut self, item: Frame<B>) -> Result<(), UserError> {
self.framed_write().buffer(item)
}
/// Flush buffered data to the wire
pub fn flush(&mut self, cx: &mut Context) -> Poll<io::Result<()>> {
self.framed_write().flush(cx)
}
/// Shutdown the send half
pub fn shutdown(&mut self, cx: &mut Context) -> Poll<io::Result<()>> {
self.framed_write().shutdown(cx)
}
}
impl<T, B> Stream for Codec<T, B>
where
T: AsyncRead + Unpin,
{
type Item = Result<Frame, Error>;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
Pin::new(&mut self.inner).poll_next(cx)
}
}
impl<T, B> Sink<Frame<B>> for Codec<T, B>
where
T: AsyncWrite + Unpin,
B: Buf,
{
type Error = SendError;
fn start_send(mut self: Pin<&mut Self>, item: Frame<B>) -> Result<(), Self::Error> {
Codec::buffer(&mut self, item)?;
Ok(())
}
/// Returns `Ready` when the codec can buffer a frame
fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.framed_write().poll_ready(cx).map_err(Into::into)
}
/// Flush buffered data to the wire
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.framed_write().flush(cx).map_err(Into::into)
}
fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
ready!(self.shutdown(cx))?;
Poll::Ready(Ok(()))
}
}
// TODO: remove (or improve) this
impl<T> From<T> for Codec<T, bytes::Bytes>
where
T: AsyncRead + AsyncWrite + Unpin,
{
fn from(src: T) -> Self {
Self::new(src)
}
}
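
The poll_ready/buffer/flush contract documented above is easiest to see end to end. A rough, crate-internal sketch (not part of this file; in the crate itself the connection code in `proto` drives the codec, and `send_ping_frame` plus its use of `std::future::poll_fn` are illustrative only):

use std::future::poll_fn;

async fn send_ping_frame<T, B>(codec: &mut Codec<T, B>) -> Result<(), SendError>
where
    T: AsyncWrite + Unpin,
    B: Buf,
{
    // 1. Wait until the write buffer has room for another frame.
    poll_fn(|cx| codec.poll_ready(cx)).await?;
    // 2. Queue the frame; any `Frame<B>` variant goes through the same path.
    codec.buffer(frame::Ping::new(frame::Ping::USER).into())?;
    // 3. Drive the buffered bytes out to the underlying I/O.
    poll_fn(|cx| codec.flush(cx)).await?;
    Ok(())
}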

View File

@@ -0,0 +1,211 @@
use crate::codec::{SendError, UserError};
use crate::frame::StreamId;
use crate::proto::{self, Initiator};
use bytes::Bytes;
use std::{error, fmt, io};
pub use crate::frame::Reason;
/// Represents HTTP/2 operation errors.
///
/// `Error` covers error cases raised by protocol errors caused by the
/// peer, I/O (transport) errors, and errors caused by the user of the library.
///
/// If the error was caused by the remote peer, then it will contain a
/// [`Reason`] which can be obtained with the [`reason`] function.
///
/// [`Reason`]: struct.Reason.html
/// [`reason`]: #method.reason
#[derive(Debug)]
pub struct Error {
kind: Kind,
}
#[derive(Debug)]
enum Kind {
/// A RST_STREAM frame was received or sent.
#[allow(dead_code)]
Reset(StreamId, Reason, Initiator),
/// A GO_AWAY frame was received or sent.
GoAway(Bytes, Reason, Initiator),
/// The user created an error from a bare Reason.
Reason(Reason),
/// An error resulting from an invalid action taken by the user of this
/// library.
User(UserError),
/// An `io::Error` occurred while trying to read or write.
Io(io::Error),
}
// ===== impl Error =====
impl Error {
/// If the error was caused by the remote peer, the error reason.
///
/// This is either an error received by the peer or caused by an invalid
/// action taken by the peer (i.e. a protocol error).
pub fn reason(&self) -> Option<Reason> {
match self.kind {
Kind::Reset(_, reason, _) | Kind::GoAway(_, reason, _) | Kind::Reason(reason) => {
Some(reason)
}
_ => None,
}
}
/// Returns true if the error is an io::Error
pub fn is_io(&self) -> bool {
matches!(self.kind, Kind::Io(..))
}
/// Returns the error if the error is an io::Error
pub fn get_io(&self) -> Option<&io::Error> {
match self.kind {
Kind::Io(ref e) => Some(e),
_ => None,
}
}
/// Returns the error if the error is an io::Error
pub fn into_io(self) -> Option<io::Error> {
match self.kind {
Kind::Io(e) => Some(e),
_ => None,
}
}
pub(crate) fn from_io(err: io::Error) -> Self {
Error {
kind: Kind::Io(err),
}
}
/// Returns true if the error is from a `GOAWAY`.
pub fn is_go_away(&self) -> bool {
matches!(self.kind, Kind::GoAway(..))
}
/// Returns true if the error is from a `RST_STREAM`.
pub fn is_reset(&self) -> bool {
matches!(self.kind, Kind::Reset(..))
}
/// Returns true if the error was received in a frame from the remote.
///
/// Such as from a received `RST_STREAM` or `GOAWAY` frame.
pub fn is_remote(&self) -> bool {
matches!(
self.kind,
Kind::GoAway(_, _, Initiator::Remote) | Kind::Reset(_, _, Initiator::Remote)
)
}
/// Returns true if the error was created by `h2`.
///
/// Such as noticing some protocol error and sending a GOAWAY or RST_STREAM.
pub fn is_library(&self) -> bool {
matches!(
self.kind,
Kind::GoAway(_, _, Initiator::Library) | Kind::Reset(_, _, Initiator::Library)
)
}
}
impl From<proto::Error> for Error {
fn from(src: proto::Error) -> Error {
use crate::proto::Error::*;
Error {
kind: match src {
Reset(stream_id, reason, initiator) => Kind::Reset(stream_id, reason, initiator),
GoAway(debug_data, reason, initiator) => {
Kind::GoAway(debug_data, reason, initiator)
}
Io(kind, inner) => {
Kind::Io(inner.map_or_else(|| kind.into(), |inner| io::Error::new(kind, inner)))
}
},
}
}
}
impl From<Reason> for Error {
fn from(src: Reason) -> Error {
Error {
kind: Kind::Reason(src),
}
}
}
impl From<SendError> for Error {
fn from(src: SendError) -> Error {
match src {
SendError::User(e) => e.into(),
SendError::Connection(e) => e.into(),
}
}
}
impl From<UserError> for Error {
fn from(src: UserError) -> Error {
Error {
kind: Kind::User(src),
}
}
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let debug_data = match self.kind {
Kind::Reset(_, reason, Initiator::User) => {
return write!(fmt, "stream error sent by user: {}", reason)
}
Kind::Reset(_, reason, Initiator::Library) => {
return write!(fmt, "stream error detected: {}", reason)
}
Kind::Reset(_, reason, Initiator::Remote) => {
return write!(fmt, "stream error received: {}", reason)
}
Kind::GoAway(ref debug_data, reason, Initiator::User) => {
write!(fmt, "connection error sent by user: {}", reason)?;
debug_data
}
Kind::GoAway(ref debug_data, reason, Initiator::Library) => {
write!(fmt, "connection error detected: {}", reason)?;
debug_data
}
Kind::GoAway(ref debug_data, reason, Initiator::Remote) => {
write!(fmt, "connection error received: {}", reason)?;
debug_data
}
Kind::Reason(reason) => return write!(fmt, "protocol error: {}", reason),
Kind::User(ref e) => return write!(fmt, "user error: {}", e),
Kind::Io(ref e) => return e.fmt(fmt),
};
if !debug_data.is_empty() {
write!(fmt, " ({:?})", debug_data)?;
}
Ok(())
}
}
impl error::Error for Error {}
#[cfg(test)]
mod tests {
use super::Error;
use crate::Reason;
#[test]
fn error_from_reason() {
let err = Error::from(Reason::HTTP_1_1_REQUIRED);
assert_eq!(err.reason(), Some(Reason::HTTP_1_1_REQUIRED));
}
}

View File

@@ -0,0 +1,55 @@
//! Extensions specific to the HTTP/2 protocol.
use crate::hpack::BytesStr;
use bytes::Bytes;
use std::fmt;
/// Represents the `:protocol` pseudo-header used by
/// the [Extended CONNECT Protocol].
///
/// [Extended CONNECT Protocol]: https://datatracker.ietf.org/doc/html/rfc8441#section-4
#[derive(Clone, Eq, PartialEq)]
pub struct Protocol {
value: BytesStr,
}
impl Protocol {
/// Converts a static string to a protocol name.
pub const fn from_static(value: &'static str) -> Self {
Self {
value: BytesStr::from_static(value),
}
}
/// Returns a str representation of the header.
pub fn as_str(&self) -> &str {
self.value.as_str()
}
pub(crate) fn try_from(bytes: Bytes) -> Result<Self, std::str::Utf8Error> {
Ok(Self {
value: BytesStr::try_from(bytes)?,
})
}
}
impl<'a> From<&'a str> for Protocol {
fn from(value: &'a str) -> Self {
Self {
value: BytesStr::from(value),
}
}
}
impl AsRef<[u8]> for Protocol {
fn as_ref(&self) -> &[u8] {
self.value.as_ref()
}
}
impl fmt::Debug for Protocol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.value.fmt(f)
}
}
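
A short illustrative sketch (not part of this file): RFC 8441 bootstraps WebSockets over HTTP/2 by sending an extended CONNECT with `:protocol: websocket`, which is exactly the kind of value `Protocol::from_static` carries.

fn websocket_protocol() -> Protocol {
    // `from_static` is `const`, so this could equally be a `const` item.
    Protocol::from_static("websocket")
}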

View File

@@ -0,0 +1,227 @@
use crate::frame::{util, Error, Frame, Head, Kind, StreamId};
use bytes::{Buf, BufMut, Bytes};
use std::fmt;
/// Data frame
///
/// Data frames convey arbitrary, variable-length sequences of octets associated
/// with a stream. One or more DATA frames are used, for instance, to carry HTTP
/// request or response payloads.
#[derive(Eq, PartialEq)]
pub struct Data<T = Bytes> {
stream_id: StreamId,
data: T,
flags: DataFlags,
pad_len: Option<u8>,
}
#[derive(Copy, Clone, Default, Eq, PartialEq)]
struct DataFlags(u8);
const END_STREAM: u8 = 0x1;
const PADDED: u8 = 0x8;
const ALL: u8 = END_STREAM | PADDED;
impl<T> Data<T> {
/// Creates a new DATA frame.
pub fn new(stream_id: StreamId, payload: T) -> Self {
assert!(!stream_id.is_zero());
Data {
stream_id,
data: payload,
flags: DataFlags::default(),
pad_len: None,
}
}
/// Returns the stream identifier that this frame is associated with.
///
/// This cannot be a zero stream identifier.
pub fn stream_id(&self) -> StreamId {
self.stream_id
}
/// Gets the value of the `END_STREAM` flag for this frame.
///
/// If true, this frame is the last that the endpoint will send for the
/// identified stream.
///
/// Setting this flag causes the stream to enter one of the "half-closed"
/// states or the "closed" state (Section 5.1).
pub fn is_end_stream(&self) -> bool {
self.flags.is_end_stream()
}
/// Sets the value for the `END_STREAM` flag on this frame.
pub fn set_end_stream(&mut self, val: bool) {
if val {
self.flags.set_end_stream();
} else {
self.flags.unset_end_stream();
}
}
/// Returns whether the `PADDED` flag is set on this frame.
#[cfg(feature = "unstable")]
pub fn is_padded(&self) -> bool {
self.flags.is_padded()
}
/// Sets the value for the `PADDED` flag on this frame.
#[cfg(feature = "unstable")]
pub fn set_padded(&mut self) {
self.flags.set_padded();
}
/// Returns a reference to this frame's payload.
///
/// This does **not** include any padding that might have been originally
/// included.
pub fn payload(&self) -> &T {
&self.data
}
/// Returns a mutable reference to this frame's payload.
///
/// This does **not** include any padding that might have been originally
/// included.
pub fn payload_mut(&mut self) -> &mut T {
&mut self.data
}
/// Consumes `self` and returns the frame's payload.
///
/// This does **not** include any padding that might have been originally
/// included.
pub fn into_payload(self) -> T {
self.data
}
pub(crate) fn head(&self) -> Head {
Head::new(Kind::Data, self.flags.into(), self.stream_id)
}
pub(crate) fn map<F, U>(self, f: F) -> Data<U>
where
F: FnOnce(T) -> U,
{
Data {
stream_id: self.stream_id,
data: f(self.data),
flags: self.flags,
pad_len: self.pad_len,
}
}
}
impl Data<Bytes> {
pub(crate) fn load(head: Head, mut payload: Bytes) -> Result<Self, Error> {
let flags = DataFlags::load(head.flag());
// The stream identifier must not be zero
if head.stream_id().is_zero() {
return Err(Error::InvalidStreamId);
}
let pad_len = if flags.is_padded() {
let len = util::strip_padding(&mut payload)?;
Some(len)
} else {
None
};
Ok(Data {
stream_id: head.stream_id(),
data: payload,
flags,
pad_len,
})
}
}
impl<T: Buf> Data<T> {
/// Encode the data frame into the `dst` buffer.
///
/// # Panics
///
/// Panics if `dst` cannot contain the data frame.
pub(crate) fn encode_chunk<U: BufMut>(&mut self, dst: &mut U) {
let len = self.data.remaining();
assert!(dst.remaining_mut() >= len);
self.head().encode(len, dst);
dst.put(&mut self.data);
}
}
impl<T> From<Data<T>> for Frame<T> {
fn from(src: Data<T>) -> Self {
Frame::Data(src)
}
}
impl<T> fmt::Debug for Data<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut f = fmt.debug_struct("Data");
f.field("stream_id", &self.stream_id);
if !self.flags.is_empty() {
f.field("flags", &self.flags);
}
if let Some(ref pad_len) = self.pad_len {
f.field("pad_len", pad_len);
}
// `data` bytes purposefully excluded
f.finish()
}
}
// ===== impl DataFlags =====
impl DataFlags {
fn load(bits: u8) -> DataFlags {
DataFlags(bits & ALL)
}
fn is_empty(&self) -> bool {
self.0 == 0
}
fn is_end_stream(&self) -> bool {
self.0 & END_STREAM == END_STREAM
}
fn set_end_stream(&mut self) {
self.0 |= END_STREAM
}
fn unset_end_stream(&mut self) {
self.0 &= !END_STREAM
}
fn is_padded(&self) -> bool {
self.0 & PADDED == PADDED
}
#[cfg(feature = "unstable")]
fn set_padded(&mut self) {
self.0 |= PADDED
}
}
impl From<DataFlags> for u8 {
fn from(src: DataFlags) -> u8 {
src.0
}
}
impl fmt::Debug for DataFlags {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
util::debug_flags(fmt, self.0)
.flag_if(self.is_end_stream(), "END_STREAM")
.flag_if(self.is_padded(), "PADDED")
.finish()
}
}
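
A minimal sketch (not part of this file) of the typical way a DATA frame ends a body: the stream id comes from a HEADERS frame handled elsewhere and only has to be non-zero, since `Data::new` asserts that.

fn final_data_frame(stream_id: StreamId, body: Bytes) -> Data<Bytes> {
    let mut frame = Data::new(stream_id, body);
    // Half-closes the stream once this payload is sent (END_STREAM).
    frame.set_end_stream(true);
    frame
}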

View File

@@ -0,0 +1,87 @@
use std::fmt;
use bytes::{BufMut, Bytes};
use crate::frame::{self, Error, Head, Kind, Reason, StreamId};
#[derive(Clone, Eq, PartialEq)]
pub struct GoAway {
last_stream_id: StreamId,
error_code: Reason,
debug_data: Bytes,
}
impl GoAway {
pub fn new(last_stream_id: StreamId, reason: Reason) -> Self {
GoAway {
last_stream_id,
error_code: reason,
debug_data: Bytes::new(),
}
}
pub fn with_debug_data(last_stream_id: StreamId, reason: Reason, debug_data: Bytes) -> Self {
Self {
last_stream_id,
error_code: reason,
debug_data,
}
}
pub fn last_stream_id(&self) -> StreamId {
self.last_stream_id
}
pub fn reason(&self) -> Reason {
self.error_code
}
pub fn debug_data(&self) -> &Bytes {
&self.debug_data
}
pub fn load(payload: &[u8]) -> Result<GoAway, Error> {
if payload.len() < 8 {
return Err(Error::BadFrameSize);
}
let (last_stream_id, _) = StreamId::parse(&payload[..4]);
let error_code = unpack_octets_4!(payload, 4, u32);
let debug_data = Bytes::copy_from_slice(&payload[8..]);
Ok(GoAway {
last_stream_id,
error_code: error_code.into(),
debug_data,
})
}
pub fn encode<B: BufMut>(&self, dst: &mut B) {
tracing::trace!("encoding GO_AWAY; code={:?}", self.error_code);
let head = Head::new(Kind::GoAway, 0, StreamId::zero());
head.encode(8 + self.debug_data.len(), dst);
dst.put_u32(self.last_stream_id.into());
dst.put_u32(self.error_code.into());
dst.put(self.debug_data.slice(..));
}
}
impl<B> From<GoAway> for frame::Frame<B> {
fn from(src: GoAway) -> Self {
frame::Frame::GoAway(src)
}
}
impl fmt::Debug for GoAway {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut builder = f.debug_struct("GoAway");
builder.field("error_code", &self.error_code);
builder.field("last_stream_id", &self.last_stream_id);
if !self.debug_data.is_empty() {
builder.field("debug_data", &self.debug_data);
}
builder.finish()
}
}
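
A small sketch (not part of this file), roughly the GOAWAY that results when the CONTINUATION-flood guard in framed_read fires via `Error::library_go_away_data(Reason::ENHANCE_YOUR_CALM, "too_many_continuations")`. The real path fills in the last processed stream id; `StreamId::zero()` is used here only to keep the sketch self-contained.

fn calm_down() -> GoAway {
    GoAway::with_debug_data(
        StreamId::zero(),
        Reason::ENHANCE_YOUR_CALM,
        Bytes::from_static(b"too_many_continuations"),
    )
}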

View File

@@ -0,0 +1,94 @@
use super::StreamId;
use bytes::BufMut;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct Head {
kind: Kind,
flag: u8,
stream_id: StreamId,
}
#[repr(u8)]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Kind {
Data = 0,
Headers = 1,
Priority = 2,
Reset = 3,
Settings = 4,
PushPromise = 5,
Ping = 6,
GoAway = 7,
WindowUpdate = 8,
Continuation = 9,
Unknown,
}
// ===== impl Head =====
impl Head {
pub fn new(kind: Kind, flag: u8, stream_id: StreamId) -> Head {
Head {
kind,
flag,
stream_id,
}
}
/// Parse an HTTP/2 frame header
pub fn parse(header: &[u8]) -> Head {
let (stream_id, _) = StreamId::parse(&header[5..]);
Head {
kind: Kind::new(header[3]),
flag: header[4],
stream_id,
}
}
pub fn stream_id(&self) -> StreamId {
self.stream_id
}
pub fn kind(&self) -> Kind {
self.kind
}
pub fn flag(&self) -> u8 {
self.flag
}
pub fn encode_len(&self) -> usize {
super::HEADER_LEN
}
pub fn encode<T: BufMut>(&self, payload_len: usize, dst: &mut T) {
debug_assert!(self.encode_len() <= dst.remaining_mut());
dst.put_uint(payload_len as u64, 3);
dst.put_u8(self.kind as u8);
dst.put_u8(self.flag);
dst.put_u32(self.stream_id.into());
}
}
// ===== impl Kind =====
impl Kind {
pub fn new(byte: u8) -> Kind {
match byte {
0 => Kind::Data,
1 => Kind::Headers,
2 => Kind::Priority,
3 => Kind::Reset,
4 => Kind::Settings,
5 => Kind::PushPromise,
6 => Kind::Ping,
7 => Kind::GoAway,
8 => Kind::WindowUpdate,
9 => Kind::Continuation,
_ => Kind::Unknown,
}
}
}
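
A test-style, crate-internal sketch (not part of this file) of the 9-octet wire header `Head::encode` produces: 3 bytes of payload length, 1 byte of frame type, 1 byte of flags, and a 4-byte stream id.

#[cfg(test)]
mod head_layout_sketch {
    use super::*;
    #[test]
    fn encodes_nine_octets() {
        // HEADERS frame, flag 0x4 (END_HEADERS), stream 1, 16-byte payload.
        let (stream_id, _) = StreamId::parse(&[0, 0, 0, 1]);
        let head = Head::new(Kind::Headers, 0x4, stream_id);
        let mut buf = Vec::new();
        head.encode(16, &mut buf);
        assert_eq!(buf, [0x00, 0x00, 0x10, 0x01, 0x04, 0x00, 0x00, 0x00, 0x01]);
    }
}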

File diff suppressed because it is too large

View File

@@ -0,0 +1,171 @@
use crate::hpack;
use bytes::Bytes;
use std::fmt;
/// A helper macro that unpacks a sequence of 4 bytes found in the buffer with
/// the given identifier, starting at the given offset, into the given integer
/// type. Obviously, the integer type should be able to support at least 4
/// bytes.
///
/// # Examples
///
/// ```ignore
/// # // We ignore this doctest because the macro is not exported.
/// let buf: [u8; 4] = [0, 0, 0, 1];
/// assert_eq!(1u32, unpack_octets_4!(buf, 0, u32));
/// ```
macro_rules! unpack_octets_4 {
// TODO: Get rid of this macro
($buf:expr, $offset:expr, $tip:ty) => {
(($buf[$offset + 0] as $tip) << 24)
| (($buf[$offset + 1] as $tip) << 16)
| (($buf[$offset + 2] as $tip) << 8)
| (($buf[$offset + 3] as $tip) << 0)
};
}
#[cfg(test)]
mod tests {
#[test]
fn test_unpack_octets_4() {
let buf: [u8; 4] = [0, 0, 0, 1];
assert_eq!(1u32, unpack_octets_4!(buf, 0, u32));
}
}
mod data;
mod go_away;
mod head;
mod headers;
mod ping;
mod priority;
mod reason;
mod reset;
mod settings;
mod stream_id;
mod util;
mod window_update;
pub use self::data::Data;
pub use self::go_away::GoAway;
pub use self::head::{Head, Kind};
pub use self::headers::{
parse_u64, Continuation, Headers, Pseudo, PushPromise, PushPromiseHeaderError,
};
pub use self::ping::Ping;
pub use self::priority::{Priority, StreamDependency};
pub use self::reason::Reason;
pub use self::reset::Reset;
pub use self::settings::Settings;
pub use self::stream_id::{StreamId, StreamIdOverflow};
pub use self::window_update::WindowUpdate;
#[cfg(feature = "unstable")]
pub use crate::hpack::BytesStr;
// Re-export some constants
pub use self::settings::{
DEFAULT_INITIAL_WINDOW_SIZE, DEFAULT_MAX_FRAME_SIZE, DEFAULT_SETTINGS_HEADER_TABLE_SIZE,
MAX_MAX_FRAME_SIZE,
};
pub type FrameSize = u32;
pub const HEADER_LEN: usize = 9;
#[derive(Eq, PartialEq)]
pub enum Frame<T = Bytes> {
Data(Data<T>),
Headers(Headers),
Priority(Priority),
PushPromise(PushPromise),
Settings(Settings),
Ping(Ping),
GoAway(GoAway),
WindowUpdate(WindowUpdate),
Reset(Reset),
}
impl<T> Frame<T> {
pub fn map<F, U>(self, f: F) -> Frame<U>
where
F: FnOnce(T) -> U,
{
use self::Frame::*;
match self {
Data(frame) => frame.map(f).into(),
Headers(frame) => frame.into(),
Priority(frame) => frame.into(),
PushPromise(frame) => frame.into(),
Settings(frame) => frame.into(),
Ping(frame) => frame.into(),
GoAway(frame) => frame.into(),
WindowUpdate(frame) => frame.into(),
Reset(frame) => frame.into(),
}
}
}
impl<T> fmt::Debug for Frame<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
use self::Frame::*;
match *self {
Data(ref frame) => fmt::Debug::fmt(frame, fmt),
Headers(ref frame) => fmt::Debug::fmt(frame, fmt),
Priority(ref frame) => fmt::Debug::fmt(frame, fmt),
PushPromise(ref frame) => fmt::Debug::fmt(frame, fmt),
Settings(ref frame) => fmt::Debug::fmt(frame, fmt),
Ping(ref frame) => fmt::Debug::fmt(frame, fmt),
GoAway(ref frame) => fmt::Debug::fmt(frame, fmt),
WindowUpdate(ref frame) => fmt::Debug::fmt(frame, fmt),
Reset(ref frame) => fmt::Debug::fmt(frame, fmt),
}
}
}
/// Errors that can occur during parsing an HTTP/2 frame.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Error {
/// A length value other than 8 was set on a PING message.
BadFrameSize,
/// The padding length was larger than the frame-header-specified
/// length of the payload.
TooMuchPadding,
/// An invalid setting value was provided
InvalidSettingValue,
/// An invalid window update value
InvalidWindowUpdateValue,
/// The payload length specified by the frame header was not the
/// value necessary for the specific frame type.
InvalidPayloadLength,
/// Received a payload with an ACK settings frame
InvalidPayloadAckSettings,
/// An invalid stream identifier was provided.
///
/// This is returned if a SETTINGS or PING frame is received with a stream
/// identifier other than zero.
InvalidStreamId,
/// A request or response is malformed.
MalformedMessage,
/// An invalid stream dependency ID was provided
///
/// This is returned if a HEADERS or PRIORITY frame is received with an
/// invalid stream identifier.
InvalidDependencyId,
/// Failed to perform HPACK decoding
Hpack(hpack::DecoderError),
}

View File

@@ -0,0 +1,102 @@
use crate::frame::{Error, Frame, Head, Kind, StreamId};
use bytes::BufMut;
const ACK_FLAG: u8 = 0x1;
pub type Payload = [u8; 8];
#[derive(Debug, Eq, PartialEq)]
pub struct Ping {
ack: bool,
payload: Payload,
}
// This was just 8 randomly generated bytes. We use something besides just
// zeroes to distinguish this specific PING from any other.
const SHUTDOWN_PAYLOAD: Payload = [0x0b, 0x7b, 0xa2, 0xf0, 0x8b, 0x9b, 0xfe, 0x54];
const USER_PAYLOAD: Payload = [0x3b, 0x7c, 0xdb, 0x7a, 0x0b, 0x87, 0x16, 0xb4];
impl Ping {
#[cfg(feature = "unstable")]
pub const SHUTDOWN: Payload = SHUTDOWN_PAYLOAD;
#[cfg(not(feature = "unstable"))]
pub(crate) const SHUTDOWN: Payload = SHUTDOWN_PAYLOAD;
#[cfg(feature = "unstable")]
pub const USER: Payload = USER_PAYLOAD;
#[cfg(not(feature = "unstable"))]
pub(crate) const USER: Payload = USER_PAYLOAD;
pub fn new(payload: Payload) -> Ping {
Ping {
ack: false,
payload,
}
}
pub fn pong(payload: Payload) -> Ping {
Ping { ack: true, payload }
}
pub fn is_ack(&self) -> bool {
self.ack
}
pub fn payload(&self) -> &Payload {
&self.payload
}
pub fn into_payload(self) -> Payload {
self.payload
}
/// Builds a `Ping` frame from a raw frame.
pub fn load(head: Head, bytes: &[u8]) -> Result<Ping, Error> {
debug_assert_eq!(head.kind(), crate::frame::Kind::Ping);
// PING frames are not associated with any individual stream. If a PING
// frame is received with a stream identifier field value other than
// 0x0, the recipient MUST respond with a connection error
// (Section 5.4.1) of type PROTOCOL_ERROR.
if !head.stream_id().is_zero() {
return Err(Error::InvalidStreamId);
}
// In addition to the frame header, PING frames MUST contain 8 octets of opaque
// data in the payload.
if bytes.len() != 8 {
return Err(Error::BadFrameSize);
}
let mut payload = [0; 8];
payload.copy_from_slice(bytes);
// The PING frame defines the following flags:
//
// ACK (0x1): When set, bit 0 indicates that this PING frame is a PING
// response. An endpoint MUST set this flag in PING responses. An
// endpoint MUST NOT respond to PING frames containing this flag.
let ack = head.flag() & ACK_FLAG != 0;
Ok(Ping { ack, payload })
}
pub fn encode<B: BufMut>(&self, dst: &mut B) {
let sz = self.payload.len();
tracing::trace!("encoding PING; ack={} len={}", self.ack, sz);
let flags = if self.ack { ACK_FLAG } else { 0 };
let head = Head::new(Kind::Ping, flags, StreamId::zero());
head.encode(sz, dst);
dst.put_slice(&self.payload);
}
}
impl<T> From<Ping> for Frame<T> {
fn from(src: Ping) -> Frame<T> {
Frame::Ping(src)
}
}
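
A minimal, crate-internal sketch (test-style; not part of this file) of the round trip the connection-level code performs: a received PING must be answered with a pong that echoes the same opaque payload.

#[cfg(test)]
mod ping_pong_sketch {
    use super::*;
    #[test]
    fn pong_echoes_payload() {
        let ping = Ping::new(Ping::USER);
        assert!(!ping.is_ack());
        let pong = Ping::pong(ping.into_payload());
        assert!(pong.is_ack());
        assert_eq!(pong.payload(), &Ping::USER);
    }
}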

View File

@@ -0,0 +1,72 @@
use crate::frame::*;
#[derive(Debug, Eq, PartialEq)]
pub struct Priority {
stream_id: StreamId,
dependency: StreamDependency,
}
#[derive(Debug, Eq, PartialEq)]
pub struct StreamDependency {
/// The ID of the stream dependency target
dependency_id: StreamId,
/// The weight for the stream. The value exposed (and set) here is always in
/// the range [0, 255], instead of [1, 256] (as defined in section 5.3.2.)
/// so that the value fits into a `u8`.
weight: u8,
/// True if the stream dependency is exclusive.
is_exclusive: bool,
}
impl Priority {
pub fn load(head: Head, payload: &[u8]) -> Result<Self, Error> {
let dependency = StreamDependency::load(payload)?;
if dependency.dependency_id() == head.stream_id() {
return Err(Error::InvalidDependencyId);
}
Ok(Priority {
stream_id: head.stream_id(),
dependency,
})
}
}
impl<B> From<Priority> for Frame<B> {
fn from(src: Priority) -> Self {
Frame::Priority(src)
}
}
// ===== impl StreamDependency =====
impl StreamDependency {
pub fn new(dependency_id: StreamId, weight: u8, is_exclusive: bool) -> Self {
StreamDependency {
dependency_id,
weight,
is_exclusive,
}
}
pub fn load(src: &[u8]) -> Result<Self, Error> {
if src.len() != 5 {
return Err(Error::InvalidPayloadLength);
}
// Parse the stream ID and exclusive flag
let (dependency_id, is_exclusive) = StreamId::parse(&src[..4]);
// Read the weight
let weight = src[4];
Ok(StreamDependency::new(dependency_id, weight, is_exclusive))
}
pub fn dependency_id(&self) -> StreamId {
self.dependency_id
}
}
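
A test-style, crate-internal sketch (not part of this file) of decoding a 5-octet PRIORITY payload: the high bit of the first octet is the exclusive flag, the remaining 31 bits are the dependency id, and the final octet is the weight (a wire value of 15 is exposed as 15 and corresponds to a spec weight of 16, per the comment on `weight` above).

#[cfg(test)]
mod stream_dependency_sketch {
    use super::*;
    #[test]
    fn parses_exclusive_dependency() {
        let payload = [0x80, 0x00, 0x00, 0x05, 15];
        let dep = StreamDependency::load(&payload).unwrap();
        assert_eq!(u32::from(dep.dependency_id()), 5);
    }
}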

View File

@@ -0,0 +1,134 @@
use std::fmt;
/// HTTP/2 error codes.
///
/// Error codes are used in `RST_STREAM` and `GOAWAY` frames to convey the
/// reasons for the stream or connection error. For example,
/// [`SendStream::send_reset`] takes a `Reason` argument. Also, the `Error` type
/// may contain a `Reason`.
///
/// Error codes share a common code space. Some error codes apply only to
/// streams, others apply only to connections, and others may apply to either.
/// See [RFC 7540] for more information.
///
/// See [Error Codes in the spec][spec].
///
/// [spec]: http://httpwg.org/specs/rfc7540.html#ErrorCodes
/// [`SendStream::send_reset`]: struct.SendStream.html#method.send_reset
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct Reason(u32);
impl Reason {
/// The associated condition is not a result of an error.
///
/// For example, a GOAWAY might include this code to indicate graceful
/// shutdown of a connection.
pub const NO_ERROR: Reason = Reason(0);
/// The endpoint detected an unspecific protocol error.
///
/// This error is for use when a more specific error code is not available.
pub const PROTOCOL_ERROR: Reason = Reason(1);
/// The endpoint encountered an unexpected internal error.
pub const INTERNAL_ERROR: Reason = Reason(2);
/// The endpoint detected that its peer violated the flow-control protocol.
pub const FLOW_CONTROL_ERROR: Reason = Reason(3);
/// The endpoint sent a SETTINGS frame but did not receive a response in
/// a timely manner.
pub const SETTINGS_TIMEOUT: Reason = Reason(4);
/// The endpoint received a frame after a stream was half-closed.
pub const STREAM_CLOSED: Reason = Reason(5);
/// The endpoint received a frame with an invalid size.
pub const FRAME_SIZE_ERROR: Reason = Reason(6);
/// The endpoint refused the stream prior to performing any application
/// processing.
pub const REFUSED_STREAM: Reason = Reason(7);
/// Used by the endpoint to indicate that the stream is no longer needed.
pub const CANCEL: Reason = Reason(8);
/// The endpoint is unable to maintain the header compression context for
/// the connection.
pub const COMPRESSION_ERROR: Reason = Reason(9);
/// The connection established in response to a CONNECT request was reset
/// or abnormally closed.
pub const CONNECT_ERROR: Reason = Reason(10);
/// The endpoint detected that its peer is exhibiting a behavior that might
/// be generating excessive load.
pub const ENHANCE_YOUR_CALM: Reason = Reason(11);
/// The underlying transport has properties that do not meet minimum
/// security requirements.
pub const INADEQUATE_SECURITY: Reason = Reason(12);
/// The endpoint requires that HTTP/1.1 be used instead of HTTP/2.
pub const HTTP_1_1_REQUIRED: Reason = Reason(13);
/// Get a string description of the error code.
pub fn description(&self) -> &str {
match self.0 {
0 => "not a result of an error",
1 => "unspecific protocol error detected",
2 => "unexpected internal error encountered",
3 => "flow-control protocol violated",
4 => "settings ACK not received in timely manner",
5 => "received frame when stream half-closed",
6 => "frame with invalid size",
7 => "refused stream before processing any application logic",
8 => "stream no longer needed",
9 => "unable to maintain the header compression context",
10 => {
"connection established in response to a CONNECT request was reset or abnormally \
closed"
}
11 => "detected excessive load generating behavior",
12 => "security properties do not meet minimum requirements",
13 => "endpoint requires HTTP/1.1",
_ => "unknown reason",
}
}
}
impl From<u32> for Reason {
fn from(src: u32) -> Reason {
Reason(src)
}
}
impl From<Reason> for u32 {
fn from(src: Reason) -> u32 {
src.0
}
}
impl fmt::Debug for Reason {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let name = match self.0 {
0 => "NO_ERROR",
1 => "PROTOCOL_ERROR",
2 => "INTERNAL_ERROR",
3 => "FLOW_CONTROL_ERROR",
4 => "SETTINGS_TIMEOUT",
5 => "STREAM_CLOSED",
6 => "FRAME_SIZE_ERROR",
7 => "REFUSED_STREAM",
8 => "CANCEL",
9 => "COMPRESSION_ERROR",
10 => "CONNECT_ERROR",
11 => "ENHANCE_YOUR_CALM",
12 => "INADEQUATE_SECURITY",
13 => "HTTP_1_1_REQUIRED",
other => return f.debug_tuple("Reason").field(&Hex(other)).finish(),
};
f.write_str(name)
}
}
struct Hex(u32);
impl fmt::Debug for Hex {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::LowerHex::fmt(&self.0, f)
}
}
impl fmt::Display for Reason {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{}", self.description())
}
}
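
A brief illustration of how Reason behaves at the edges of its code space, using only the conversions and formatting defined above (the function name is illustrative):

fn reason_display() {
    let calm = Reason::from(11);
    assert_eq!(calm, Reason::ENHANCE_YOUR_CALM);
    assert_eq!(format!("{calm}"), "detected excessive load generating behavior");
    assert_eq!(format!("{calm:?}"), "ENHANCE_YOUR_CALM");

    // Unknown codes are preserved rather than rejected; Debug falls back to hex.
    let unknown = Reason::from(0xbeef);
    assert_eq!(format!("{unknown:?}"), "Reason(beef)");
    assert_eq!(u32::from(unknown), 0xbeef);
}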

View File

@@ -0,0 +1,56 @@
use crate::frame::{self, Error, Head, Kind, Reason, StreamId};
use bytes::BufMut;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Reset {
stream_id: StreamId,
error_code: Reason,
}
impl Reset {
pub fn new(stream_id: StreamId, error: Reason) -> Reset {
Reset {
stream_id,
error_code: error,
}
}
pub fn stream_id(&self) -> StreamId {
self.stream_id
}
pub fn reason(&self) -> Reason {
self.error_code
}
pub fn load(head: Head, payload: &[u8]) -> Result<Reset, Error> {
if payload.len() != 4 {
return Err(Error::InvalidPayloadLength);
}
let error_code = unpack_octets_4!(payload, 0, u32);
Ok(Reset {
stream_id: head.stream_id(),
error_code: error_code.into(),
})
}
pub fn encode<B: BufMut>(&self, dst: &mut B) {
tracing::trace!(
"encoding RESET; id={:?} code={:?}",
self.stream_id,
self.error_code
);
let head = Head::new(Kind::Reset, 0, self.stream_id);
head.encode(4, dst);
dst.put_u32(self.error_code.into());
}
}
impl<B> From<Reset> for frame::Frame<B> {
fn from(src: Reset) -> Self {
frame::Frame::Reset(src)
}
}
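
The RST_STREAM encoding above in action, again assuming crate-internal visibility; the wire form is the 9-byte header followed by the 4-byte big-endian error code (the function name is illustrative):

use bytes::BytesMut;

fn encode_reset() {
    let reset = Reset::new(StreamId::from(1), Reason::CANCEL);

    let mut dst = BytesMut::new();
    reset.encode(&mut dst);
    assert_eq!(dst.len(), 9 + 4);

    // The trailing four octets are the error code (CANCEL = 0x8).
    assert_eq!(&dst[9..], &8u32.to_be_bytes());
}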

View File

@@ -0,0 +1,389 @@
use std::fmt;
use crate::frame::{util, Error, Frame, FrameSize, Head, Kind, StreamId};
use bytes::{BufMut, BytesMut};
#[derive(Clone, Default, Eq, PartialEq)]
pub struct Settings {
flags: SettingsFlags,
// Fields
header_table_size: Option<u32>,
enable_push: Option<u32>,
max_concurrent_streams: Option<u32>,
initial_window_size: Option<u32>,
max_frame_size: Option<u32>,
max_header_list_size: Option<u32>,
enable_connect_protocol: Option<u32>,
}
/// An enum that lists all valid settings that can be sent in a SETTINGS
/// frame.
///
/// Each setting has a value that is a 32 bit unsigned integer (6.5.1.).
#[derive(Debug)]
pub enum Setting {
HeaderTableSize(u32),
EnablePush(u32),
MaxConcurrentStreams(u32),
InitialWindowSize(u32),
MaxFrameSize(u32),
MaxHeaderListSize(u32),
EnableConnectProtocol(u32),
}
#[derive(Copy, Clone, Eq, PartialEq, Default)]
pub struct SettingsFlags(u8);
const ACK: u8 = 0x1;
const ALL: u8 = ACK;
/// The default value of SETTINGS_HEADER_TABLE_SIZE
pub const DEFAULT_SETTINGS_HEADER_TABLE_SIZE: usize = 4_096;
/// The default value of SETTINGS_INITIAL_WINDOW_SIZE
pub const DEFAULT_INITIAL_WINDOW_SIZE: u32 = 65_535;
/// The default value of MAX_FRAME_SIZE
pub const DEFAULT_MAX_FRAME_SIZE: FrameSize = 16_384;
/// INITIAL_WINDOW_SIZE upper bound
pub const MAX_INITIAL_WINDOW_SIZE: usize = (1 << 31) - 1;
/// MAX_FRAME_SIZE upper bound
pub const MAX_MAX_FRAME_SIZE: FrameSize = (1 << 24) - 1;
// ===== impl Settings =====
impl Settings {
pub fn ack() -> Settings {
Settings {
flags: SettingsFlags::ack(),
..Settings::default()
}
}
pub fn is_ack(&self) -> bool {
self.flags.is_ack()
}
pub fn initial_window_size(&self) -> Option<u32> {
self.initial_window_size
}
pub fn set_initial_window_size(&mut self, size: Option<u32>) {
self.initial_window_size = size;
}
pub fn max_concurrent_streams(&self) -> Option<u32> {
self.max_concurrent_streams
}
pub fn set_max_concurrent_streams(&mut self, max: Option<u32>) {
self.max_concurrent_streams = max;
}
pub fn max_frame_size(&self) -> Option<u32> {
self.max_frame_size
}
pub fn set_max_frame_size(&mut self, size: Option<u32>) {
if let Some(val) = size {
assert!(DEFAULT_MAX_FRAME_SIZE <= val && val <= MAX_MAX_FRAME_SIZE);
}
self.max_frame_size = size;
}
pub fn max_header_list_size(&self) -> Option<u32> {
self.max_header_list_size
}
pub fn set_max_header_list_size(&mut self, size: Option<u32>) {
self.max_header_list_size = size;
}
pub fn is_push_enabled(&self) -> Option<bool> {
self.enable_push.map(|val| val != 0)
}
pub fn set_enable_push(&mut self, enable: bool) {
self.enable_push = Some(enable as u32);
}
pub fn is_extended_connect_protocol_enabled(&self) -> Option<bool> {
self.enable_connect_protocol.map(|val| val != 0)
}
pub fn set_enable_connect_protocol(&mut self, val: Option<u32>) {
self.enable_connect_protocol = val;
}
pub fn header_table_size(&self) -> Option<u32> {
self.header_table_size
}
pub fn set_header_table_size(&mut self, size: Option<u32>) {
self.header_table_size = size;
}
pub fn load(head: Head, payload: &[u8]) -> Result<Settings, Error> {
use self::Setting::*;
debug_assert_eq!(head.kind(), crate::frame::Kind::Settings);
if !head.stream_id().is_zero() {
return Err(Error::InvalidStreamId);
}
// Load the flag
let flag = SettingsFlags::load(head.flag());
if flag.is_ack() {
// Ensure that the payload is empty
if !payload.is_empty() {
return Err(Error::InvalidPayloadLength);
}
// Return the ACK frame
return Ok(Settings::ack());
}
// Ensure the payload length is correct, each setting is 6 bytes long.
if payload.len() % 6 != 0 {
tracing::debug!("invalid settings payload length; len={:?}", payload.len());
return Err(Error::InvalidPayloadAckSettings);
}
let mut settings = Settings::default();
debug_assert!(!settings.flags.is_ack());
for raw in payload.chunks(6) {
match Setting::load(raw) {
Some(HeaderTableSize(val)) => {
settings.header_table_size = Some(val);
}
Some(EnablePush(val)) => match val {
0 | 1 => {
settings.enable_push = Some(val);
}
_ => {
return Err(Error::InvalidSettingValue);
}
},
Some(MaxConcurrentStreams(val)) => {
settings.max_concurrent_streams = Some(val);
}
Some(InitialWindowSize(val)) => {
if val as usize > MAX_INITIAL_WINDOW_SIZE {
return Err(Error::InvalidSettingValue);
} else {
settings.initial_window_size = Some(val);
}
}
Some(MaxFrameSize(val)) => {
if DEFAULT_MAX_FRAME_SIZE <= val && val <= MAX_MAX_FRAME_SIZE {
settings.max_frame_size = Some(val);
} else {
return Err(Error::InvalidSettingValue);
}
}
Some(MaxHeaderListSize(val)) => {
settings.max_header_list_size = Some(val);
}
Some(EnableConnectProtocol(val)) => match val {
0 | 1 => {
settings.enable_connect_protocol = Some(val);
}
_ => {
return Err(Error::InvalidSettingValue);
}
},
None => {}
}
}
Ok(settings)
}
fn payload_len(&self) -> usize {
let mut len = 0;
self.for_each(|_| len += 6);
len
}
pub fn encode(&self, dst: &mut BytesMut) {
// Create & encode an appropriate frame head
let head = Head::new(Kind::Settings, self.flags.into(), StreamId::zero());
let payload_len = self.payload_len();
tracing::trace!("encoding SETTINGS; len={}", payload_len);
head.encode(payload_len, dst);
// Encode the settings
self.for_each(|setting| {
tracing::trace!("encoding setting; val={:?}", setting);
setting.encode(dst)
});
}
fn for_each<F: FnMut(Setting)>(&self, mut f: F) {
use self::Setting::*;
if let Some(v) = self.header_table_size {
f(HeaderTableSize(v));
}
if let Some(v) = self.enable_push {
f(EnablePush(v));
}
if let Some(v) = self.max_concurrent_streams {
f(MaxConcurrentStreams(v));
}
if let Some(v) = self.initial_window_size {
f(InitialWindowSize(v));
}
if let Some(v) = self.max_frame_size {
f(MaxFrameSize(v));
}
if let Some(v) = self.max_header_list_size {
f(MaxHeaderListSize(v));
}
if let Some(v) = self.enable_connect_protocol {
f(EnableConnectProtocol(v));
}
}
}
impl<T> From<Settings> for Frame<T> {
fn from(src: Settings) -> Frame<T> {
Frame::Settings(src)
}
}
impl fmt::Debug for Settings {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut builder = f.debug_struct("Settings");
builder.field("flags", &self.flags);
self.for_each(|setting| match setting {
Setting::EnablePush(v) => {
builder.field("enable_push", &v);
}
Setting::HeaderTableSize(v) => {
builder.field("header_table_size", &v);
}
Setting::InitialWindowSize(v) => {
builder.field("initial_window_size", &v);
}
Setting::MaxConcurrentStreams(v) => {
builder.field("max_concurrent_streams", &v);
}
Setting::MaxFrameSize(v) => {
builder.field("max_frame_size", &v);
}
Setting::MaxHeaderListSize(v) => {
builder.field("max_header_list_size", &v);
}
Setting::EnableConnectProtocol(v) => {
builder.field("enable_connect_protocol", &v);
}
});
builder.finish()
}
}
// ===== impl Setting =====
impl Setting {
/// Creates a new `Setting` with the correct variant corresponding to the
/// given setting id, based on the settings IDs defined in section
/// 6.5.2.
pub fn from_id(id: u16, val: u32) -> Option<Setting> {
use self::Setting::*;
match id {
1 => Some(HeaderTableSize(val)),
2 => Some(EnablePush(val)),
3 => Some(MaxConcurrentStreams(val)),
4 => Some(InitialWindowSize(val)),
5 => Some(MaxFrameSize(val)),
6 => Some(MaxHeaderListSize(val)),
8 => Some(EnableConnectProtocol(val)),
_ => None,
}
}
/// Creates a new `Setting` by parsing the given buffer of 6 bytes, which
/// contains the raw byte representation of the setting, according to the
/// "SETTINGS format" defined in section 6.5.1.
///
/// The `raw` parameter should have length at least 6 bytes, since the
/// length of the raw setting is exactly 6 bytes.
///
/// # Panics
///
/// If given a buffer shorter than 6 bytes, the function will panic.
fn load(raw: &[u8]) -> Option<Setting> {
let id: u16 = (u16::from(raw[0]) << 8) | u16::from(raw[1]);
let val: u32 = unpack_octets_4!(raw, 2, u32);
Setting::from_id(id, val)
}
fn encode(&self, dst: &mut BytesMut) {
use self::Setting::*;
let (kind, val) = match *self {
HeaderTableSize(v) => (1, v),
EnablePush(v) => (2, v),
MaxConcurrentStreams(v) => (3, v),
InitialWindowSize(v) => (4, v),
MaxFrameSize(v) => (5, v),
MaxHeaderListSize(v) => (6, v),
EnableConnectProtocol(v) => (8, v),
};
dst.put_u16(kind);
dst.put_u32(val);
}
}
// ===== impl SettingsFlags =====
impl SettingsFlags {
pub fn empty() -> SettingsFlags {
SettingsFlags(0)
}
pub fn load(bits: u8) -> SettingsFlags {
SettingsFlags(bits & ALL)
}
pub fn ack() -> SettingsFlags {
SettingsFlags(ACK)
}
pub fn is_ack(&self) -> bool {
self.0 & ACK == ACK
}
}
impl From<SettingsFlags> for u8 {
fn from(src: SettingsFlags) -> u8 {
src.0
}
}
impl fmt::Debug for SettingsFlags {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
util::debug_flags(f, self.0)
.flag_if(self.is_ack(), "ACK")
.finish()
}
}
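
Tying the SETTINGS pieces together: each populated field is serialised as a 2-byte identifier plus a 4-byte value, so the payload is always 6 bytes per setting. A round-trip sketch under the same crate-internal assumption (the function name is illustrative):

use bytes::BytesMut;

fn settings_round_trip() {
    let mut settings = Settings::default();
    settings.set_enable_push(false);
    settings.set_initial_window_size(Some(1 << 20));
    settings.set_max_frame_size(Some(16_384)); // DEFAULT_MAX_FRAME_SIZE; must stay in [2^14, 2^24 - 1]

    let mut dst = BytesMut::new();
    settings.encode(&mut dst);
    assert_eq!(dst.len(), 9 + 3 * 6); // header + three 6-byte settings

    // Parsing the payload back yields an equal, non-ACK Settings frame.
    let head = Head::new(Kind::Settings, 0, StreamId::zero());
    let parsed = Settings::load(head, &dst[9..]).expect("well-formed SETTINGS payload");
    assert!(!parsed.is_ack());
    assert_eq!(parsed, settings);
}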

View File

@@ -0,0 +1,94 @@
/// A stream identifier, as described in [Section 5.1.1] of RFC 7540.
///
/// Streams are identified with an unsigned 31-bit integer. Streams
/// initiated by a client MUST use odd-numbered stream identifiers; those
/// initiated by the server MUST use even-numbered stream identifiers. A
/// stream identifier of zero (0x0) is used for connection control
/// messages; the stream identifier of zero cannot be used to establish a
/// new stream.
///
/// [Section 5.1.1]: https://tools.ietf.org/html/rfc7540#section-5.1.1
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct StreamId(u32);
#[derive(Debug, Copy, Clone)]
pub struct StreamIdOverflow;
const STREAM_ID_MASK: u32 = 1 << 31;
impl StreamId {
/// Stream ID 0.
pub const ZERO: StreamId = StreamId(0);
/// The maximum allowed stream ID.
pub const MAX: StreamId = StreamId(u32::MAX >> 1);
/// Parse the stream ID
#[inline]
pub fn parse(buf: &[u8]) -> (StreamId, bool) {
let mut ubuf = [0; 4];
ubuf.copy_from_slice(&buf[0..4]);
let unpacked = u32::from_be_bytes(ubuf);
let flag = unpacked & STREAM_ID_MASK == STREAM_ID_MASK;
// Now clear the most significant bit, as that is reserved and MUST be
// ignored when received.
(StreamId(unpacked & !STREAM_ID_MASK), flag)
}
/// Returns true if this stream ID corresponds to a stream that
/// was initiated by the client.
pub fn is_client_initiated(&self) -> bool {
let id = self.0;
id != 0 && id % 2 == 1
}
/// Returns true if this stream ID corresponds to a stream that
/// was initiated by the server.
pub fn is_server_initiated(&self) -> bool {
let id = self.0;
id != 0 && id % 2 == 0
}
/// Return a new `StreamId` for stream 0.
#[inline]
pub fn zero() -> StreamId {
StreamId::ZERO
}
/// Returns true if this stream ID is zero.
pub fn is_zero(&self) -> bool {
self.0 == 0
}
/// Returns the next stream ID initiated by the same peer as this stream
/// ID, or an error if incrementing this stream ID would overflow the
/// maximum.
pub fn next_id(&self) -> Result<StreamId, StreamIdOverflow> {
let next = self.0 + 2;
if next > StreamId::MAX.0 {
Err(StreamIdOverflow)
} else {
Ok(StreamId(next))
}
}
}
impl From<u32> for StreamId {
fn from(src: u32) -> Self {
assert_eq!(src & STREAM_ID_MASK, 0, "invalid stream ID -- MSB is set");
StreamId(src)
}
}
impl From<StreamId> for u32 {
fn from(src: StreamId) -> Self {
src.0
}
}
impl PartialEq<u32> for StreamId {
fn eq(&self, other: &u32) -> bool {
self.0 == *other
}
}
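
Finally, a few lines exercising the StreamId rules above: the reserved MSB is reported separately on parse, odd ids belong to the client, and ids initiated by the same peer step by two (the function name is illustrative):

fn stream_id_basics() {
    // parse() reads 4 big-endian octets and returns the reserved MSB as a flag.
    let raw = (5u32 | 1 << 31).to_be_bytes();
    let (id, reserved_bit) = StreamId::parse(&raw);
    assert_eq!(id, 5u32);
    assert!(reserved_bit);
    assert!(id.is_client_initiated());

    // next_id() walks ids initiated by the same peer: 5 -> 7.
    assert_eq!(id.next_id().unwrap(), 7u32);

    // Incrementing past the 31-bit maximum overflows.
    assert!(StreamId::MAX.next_id().is_err());
}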

Some files were not shown because too many files have changed in this diff.