Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ci: Add linter CI and lint all code #156

Merged
merged 17 commits into from
Dec 3, 2024
114 changes: 114 additions & 0 deletions .github/workflows/linter.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
# Lint CI: checks Rust formatting and clippy plus Flutter/Dart analysis on
# every push or pull request targeting master, or on manual dispatch.
name: Lint

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
  workflow_dispatch:

permissions:
  checks: write
  pull-requests: write

jobs:
  rust-analyze:
    name: Rust Analyze
    runs-on: ubuntu-latest

    steps:
      - name: Check Out
        uses: actions/checkout@v4

      # Flutter is needed even for the Rust job because `rinf message`
      # (below) drives code generation through the Flutter toolchain.
      - name: Setup Flutter Toolchain
        uses: subosito/flutter-action@v2
        with:
          channel: 'stable'

      # Nightly is used for rustfmt (presumably for nightly-only options in
      # .rustfmt.toml such as `ignore` — confirm); components are installed
      # for the *nightly* toolchain only.
      - name: Setup Rust toolchain
        uses: dtolnay/rust-toolchain@nightly
        with:
          components: rustfmt, clippy

      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y liblmdb0 jq alsa-base alsa-source librust-alsa-sys-dev libasound2-dev liblmdb-dev clang cmake ninja-build pkg-config libgtk-3-dev dpkg-dev libayatana-appindicator3-dev libnotify-dev

      - uses: Swatinem/rust-cache@v2

      - name: Install Protoc
        uses: arduino/setup-protoc@v3
        with:
          version: "26.x"
          repo-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Install the Rust dependencies
        run: cargo install 'flutter_rust_bridge_codegen' rinf protoc-gen-prost

      - name: Activate Protobuf
        run: flutter pub global activate protoc_plugin

      - name: Flutter pub get
        run: flutter pub get

      # Generate the protobuf message modules before fmt/clippy so the
      # crate compiles with them present.
      - name: Generate message files
        run: rinf message

      - name: Run cargo fmt
        run: cargo fmt -- --check

      # Clippy is intentionally run on stable. Because the toolchain action
      # above only installed components for nightly, clippy must be added to
      # the stable toolchain explicitly after the override — otherwise this
      # step runs a toolchain without clippy installed.
      - name: Run cargo clippy
        run: |
          rustup override set stable
          rustup component add clippy
          cargo clippy -- -D warnings

  flutter-analyze:
    name: Flutter analyze
    runs-on: ubuntu-latest

    steps:
      - name: Check Out
        uses: actions/checkout@v4

      - name: Setup Flutter Toolchain
        uses: subosito/flutter-action@v2
        with:
          channel: 'stable'

      # Rust is required so `rinf message` can generate the bridge code the
      # Dart analyzer needs to see.
      - name: Setup Rust Toolchain
        uses: dtolnay/rust-toolchain@stable

      - uses: Swatinem/rust-cache@v2

      - name: Install Protoc
        uses: arduino/setup-protoc@v3
        with:
          version: "26.x"
          repo-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Install the Rust dependencies
        run: cargo install 'flutter_rust_bridge_codegen' rinf protoc-gen-prost

      - name: Activate Protobuf
        run: flutter pub global activate protoc_plugin

      - name: Flutter pub get
        run: flutter pub get

      - name: Generate message files
        run: rinf message

      - name: Analyze Flutter
        run: flutter analyze .

      - name: Dart Analyze
        run: dart analyze .




1 change: 1 addition & 0 deletions .rustfmt.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# Skip generated protobuf message modules when formatting.
# NOTE(review): `ignore` is a nightly-only rustfmt option — confirm CI runs
# rustfmt on a nightly toolchain, or this setting is rejected/ignored.
ignore = ["native/hub/src/messages"]
6 changes: 6 additions & 0 deletions Justfile
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
# Run every linter the CI "Lint" workflow runs, in the same order:
# Rust formatting check, clippy with warnings as errors, then the
# Flutter and Dart analyzers over the whole repository.
lint:
    cargo fmt -- --check
    cargo clippy -- -D warnings
    flutter analyze .
    dart analyze .

macos-ci-all: macos-ci-clean macos-ci-install
./scripts/macos_2_build.sh
./scripts/macos_3_prepare_before_sign.sh
Expand Down
5 changes: 3 additions & 2 deletions analysis/src/analyzer/cpu_sub_analyzer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,9 @@ impl SubAnalyzer for CpuSubAnalyzer {
core_analyzer.total_zcr += zcr(resampled_chunk);
core_analyzer.total_energy += energy(resampled_chunk);

let start_idx = self.batch_cache_buffer_count * core_analyzer.window_size;
let buffer_slice = &mut self.fft_input_buffer[start_idx..start_idx + core_analyzer.window_size];
let start_idx = self.batch_cache_buffer_count * core_analyzer.window_size;
let buffer_slice =
&mut self.fft_input_buffer[start_idx..start_idx + core_analyzer.window_size];
for (i, sample) in buffer_slice.iter_mut().enumerate() {
*sample = resampled_chunk[i] * self.hanning_window[i];
}
Expand Down
3 changes: 2 additions & 1 deletion analysis/src/analyzer/gpu_sub_analyzer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,8 @@ impl SubAnalyzer for GpuSubAnalyzer {
core_analyzer.total_energy += energy(resampled_chunk);

let start_idx = self.batch_cache_buffer_count * core_analyzer.window_size;
let buffer_slice = &mut self.batch_fft_buffer[start_idx..start_idx + core_analyzer.window_size];
let buffer_slice =
&mut self.batch_fft_buffer[start_idx..start_idx + core_analyzer.window_size];
for (i, sample) in buffer_slice.iter_mut().enumerate() {
*sample = Complex::new(resampled_chunk[i] * self.hanning_window[i], 0.0);
}
Expand Down
2 changes: 1 addition & 1 deletion analysis/src/shared_utils/computing_device.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,4 +23,4 @@ impl From<&str> for ComputingDevice {
_ => ComputingDevice::Gpu,
}
}
}
}
2 changes: 1 addition & 1 deletion cli/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
pub mod analysis;
pub mod index;
pub mod mix;
pub mod playback;
pub mod recommend;
pub mod mix;
10 changes: 8 additions & 2 deletions cli/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,13 @@ async fn main() {
index_audio_library(&main_db).await;
}
Commands::Analyze { computing_device } => {
analyze_audio_library(computing_device.as_str().into(), &main_db, &analysis_db, &path).await;
analyze_audio_library(
computing_device.as_str().into(),
&main_db,
&analysis_db,
&path,
)
.await;
}
Commands::Info { file_ids } => {
match get_metadata_summary_by_file_ids(&main_db, file_ids.to_vec()).await {
Expand Down Expand Up @@ -263,4 +269,4 @@ async fn main() {
}
},
}
}
}
2 changes: 1 addition & 1 deletion database/src/actions/analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -420,7 +420,7 @@ pub async fn get_percentile(
};
// .with_context(|| "Unable to get analysis value")?;

Ok(result.unwrap_or_default() as f32)
Ok(result.unwrap_or_default())
}

pub async fn get_percentile_analysis_result(
Expand Down
5 changes: 4 additions & 1 deletion database/src/actions/file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,10 @@ use anyhow::Result;
use metadata::describe::FileDescription;
use rust_decimal::prelude::ToPrimitive;
use sea_orm::entity::prelude::*;
use sea_orm::{ColumnTrait, EntityTrait, FromQueryResult, Order, QueryFilter, QueryOrder, QuerySelect, QueryTrait};
use sea_orm::{
ColumnTrait, EntityTrait, FromQueryResult, Order, QueryFilter, QueryOrder, QuerySelect,
QueryTrait,
};

use migration::{Func, SimpleExpr};

Expand Down
8 changes: 3 additions & 5 deletions database/src/actions/index.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use anyhow::{Result, Error};
use anyhow::{Error, Result};
use log::{error, info};
use sea_orm::{prelude::*, ActiveValue};
use sea_orm::{DatabaseConnection, Set, TransactionTrait};
Expand Down Expand Up @@ -167,10 +167,8 @@ pub async fn index_audio_library(
let producer = async {
loop {
// Fetch the next batch of files
let files: Vec<media_files::Model> = cursor
.first(batch_size.try_into()?)
.all(main_db)
.await?;
let files: Vec<media_files::Model> =
cursor.first(batch_size.try_into()?).all(main_db).await?;

if files.is_empty() {
info!("No more files to process. Exiting loop.");
Expand Down
2 changes: 1 addition & 1 deletion database/src/actions/metadata.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use regex::Regex;
use anyhow::{bail, Context, Result};
use log::{debug, error, info};
use regex::Regex;
use rust_decimal::prelude::{FromPrimitive, ToPrimitive};
use sea_orm::entity::prelude::*;
use sea_orm::{ActiveValue, ColumnTrait, EntityTrait, QueryFilter};
Expand Down
2 changes: 1 addition & 1 deletion database/src/connection.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ pub fn get_storage_info(lib_path: &str, db_path: Option<&str>) -> Result<Storage
let db_path = db_path.context("db_path is required for redirected storage")?;
PathBuf::from(db_path).join(uuid.to_string())
}
}
},
};

Ok(StorageInfo {
Expand Down
2 changes: 1 addition & 1 deletion database/src/entities/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,4 @@ pub mod mix_queries;
pub mod mixes;
pub mod playback_queue;
pub mod playlists;
pub mod search_index;
pub mod search_index;
3 changes: 2 additions & 1 deletion metadata/src/artist.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@ use lazy_static::lazy_static;
use regex::Regex;

lazy_static! {
static ref SPLITTERS: Vec<&'static str> = vec![", ", "; ", " × ", " x ", " / ", " ft.", " ft. ", " feat. " , " & "];
static ref SPLITTERS: Vec<&'static str> =
vec![", ", "; ", " × ", " x ", " / ", " ft.", " ft. ", " feat. ", " & "];
static ref WHITELIST: Vec<&'static str> = vec![];
static ref SPLITTERS_REGEX: Regex = {
let splitters_pattern = SPLITTERS
Expand Down
6 changes: 3 additions & 3 deletions metadata/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
pub mod artist;
pub mod cover_art;
pub mod crc;
pub mod describe;
pub mod reader;
pub mod scanner;
pub mod artist;
pub mod describe;
pub mod cover_art;
19 changes: 13 additions & 6 deletions metadata/src/reader.rs
Original file line number Diff line number Diff line change
Expand Up @@ -183,7 +183,11 @@ pub fn string_to_standard_tag_key(s: &str) -> Option<StandardTagKey> {
STRING_TO_STANDARD_TAG_KEY.get(s).cloned()
}

fn push_tags(revision: &MetadataRevision, metadata_list: &mut Vec<(String, String)>, field_blacklist: &[&str]) {
fn push_tags(
revision: &MetadataRevision,
metadata_list: &mut Vec<(String, String)>,
field_blacklist: &[&str],
) {
for tag in revision.tags() {
let std_key = match tag.std_key {
Some(standard_key) => standard_tag_key_to_string(standard_key),
Expand All @@ -207,7 +211,10 @@ fn push_tags(revision: &MetadataRevision, metadata_list: &mut Vec<(String, Strin
}
}

pub fn get_metadata(file_path: &str, field_blacklist: Option<Vec<&str>>) -> Result<Vec<(String, String)>> {
pub fn get_metadata(
file_path: &str,
field_blacklist: Option<Vec<&str>>,
) -> Result<Vec<(String, String)>> {
if !Path::new(file_path).exists() {
bail!("File not found");
}
Expand All @@ -228,13 +235,13 @@ pub fn get_metadata(file_path: &str, field_blacklist: Option<Vec<&str>>) -> Resu
let meta_opts: MetadataOptions = Default::default();

// Probe the media source.
let mut probed = symphonia::default::get_probe()
.format(&hint, mss, &fmt_opts, &meta_opts)?;
let mut probed = symphonia::default::get_probe().format(&hint, mss, &fmt_opts, &meta_opts)?;

let mut format = probed.format;
let mut metadata_list = Vec::new();

let blacklist = field_blacklist.unwrap_or(vec!["encoded_by", "encoder", "comment", "description"]);
let blacklist =
field_blacklist.unwrap_or(vec!["encoded_by", "encoder", "comment", "description"]);

if let Some(metadata_rev) = format.metadata().current() {
push_tags(metadata_rev, &mut metadata_list, &blacklist);
Expand All @@ -243,4 +250,4 @@ pub fn get_metadata(file_path: &str, field_blacklist: Option<Vec<&str>>) -> Resu
}

Ok(metadata_list)
}
}
6 changes: 1 addition & 5 deletions migration/src/m20230701_000001_create_media_files_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,7 @@ impl MigrationTrait for Migration {
.not_null(),
)
.col(ColumnDef::new(MediaFiles::CoverArtId).integer().null())
.col(
ColumnDef::new(MediaFiles::SampleRate)
.integer()
.not_null(),
)
.col(ColumnDef::new(MediaFiles::SampleRate).integer().not_null())
.col(ColumnDef::new(MediaFiles::Duration).double().not_null())
.foreign_key(
ForeignKey::create()
Expand Down
14 changes: 11 additions & 3 deletions migration/src/m20230728_000008_create_media_cover_art_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,16 @@ impl MigrationTrait for Migration {
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(MediaCoverArt::FileHash).char_len(64).not_null())
.col(ColumnDef::new(MediaCoverArt::Binary).var_binary(16777216).not_null())
.col(
ColumnDef::new(MediaCoverArt::FileHash)
.char_len(64)
.not_null(),
)
.col(
ColumnDef::new(MediaCoverArt::Binary)
.var_binary(16777216)
.not_null(),
)
.to_owned(),
)
.await
Expand All @@ -42,5 +50,5 @@ pub enum MediaCoverArt {
Id,
FileHash,
Binary,
PrimaryColor
PrimaryColor,
}
6 changes: 1 addition & 5 deletions migration/src/m20230806_000009_create_artists_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,7 @@ impl MigrationTrait for Migration {
.not_null()
.unique_key(),
)
.col(
ColumnDef::new(Artists::Group)
.string()
.not_null(),
)
.col(ColumnDef::new(Artists::Group).string().not_null())
.to_owned(),
)
.await
Expand Down
6 changes: 1 addition & 5 deletions migration/src/m20230806_000011_create_albums_table.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,7 @@ impl MigrationTrait for Migration {
.not_null()
.unique_key(),
)
.col(
ColumnDef::new(Albums::Group)
.string()
.not_null(),
)
.col(ColumnDef::new(Albums::Group).string().not_null())
.to_owned(),
)
.await
Expand Down
2 changes: 1 addition & 1 deletion migration/src/m20231029_000017_create_search_index.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ impl MigrationTrait for Migration {

// ID is only for making sea-orm happy
db.execute_unprepared(
"CREATE VIRTUAL TABLE search_index USING fts5(id, key, entry_type, doc);"
"CREATE VIRTUAL TABLE search_index USING fts5(id, key, entry_type, doc);",
)
.await?;

Expand Down
Loading
Loading