chore: formatting
prajjwalkumar17 committed Jan 18, 2024
2 parents 5da238d + 0c124f4 commit 3e7acf4
Showing 58 changed files with 2,192 additions and 460 deletions.
11 changes: 11 additions & 0 deletions .github/CODEOWNERS
@@ -23,6 +23,17 @@ postman/ @juspay/hyperswitch-framework
Cargo.toml @juspay/hyperswitch-framework
Cargo.lock @juspay/hyperswitch-framework

crates/api_models/src/events/ @juspay/hyperswitch-analytics
crates/api_models/src/events.rs @juspay/hyperswitch-analytics
crates/api_models/src/analytics/ @juspay/hyperswitch-analytics
crates/api_models/src/analytics.rs @juspay/hyperswitch-analytics
crates/router/src/analytics.rs @juspay/hyperswitch-analytics
crates/router/src/events/ @juspay/hyperswitch-analytics
crates/router/src/events.rs @juspay/hyperswitch-analytics
crates/common_utils/src/events/ @juspay/hyperswitch-analytics
crates/common_utils/src/events.rs @juspay/hyperswitch-analytics
crates/analytics/ @juspay/hyperswitch-analytics

connector-template/ @juspay/hyperswitch-connector
crates/router/src/connector/ @juspay/hyperswitch-connector
crates/router/tests/connectors/ @juspay/hyperswitch-connector
Binary file removed .github/secrets/connector_auth.toml.gpg
21 changes: 15 additions & 6 deletions .github/workflows/connector-ui-sanity-tests.yml
@@ -84,22 +84,31 @@ jobs:
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
uses: actions/checkout@v4

- name: Decrypt connector auth file
- name: Download Encrypted TOML from S3 and Decrypt
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CONNECTOR_CREDS_AWS_ACCESS_KEY_ID }}
AWS_REGION: ${{ secrets.CONNECTOR_CREDS_AWS_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CONNECTOR_CREDS_AWS_SECRET_ACCESS_KEY }}
CONNECTOR_AUTH_PASSPHRASE: ${{ secrets.CONNECTOR_AUTH_PASSPHRASE }}
CONNECTOR_CREDS_S3_BUCKET_URI: ${{ secrets.CONNECTOR_CREDS_S3_BUCKET_URI}}
DESTINATION_FILE_NAME: "connector_auth.toml.gpg"
S3_SOURCE_FILE_NAME: "cf05a6ab-525e-4888-98b3-3b4a443b87c0.toml.gpg"
shell: bash
run: ./scripts/decrypt_connector_auth.sh
run: |
mkdir -p ${HOME}/target/secrets ${HOME}/target/test
aws s3 cp "${CONNECTOR_CREDS_S3_BUCKET_URI}/${S3_SOURCE_FILE_NAME}" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
gpg --quiet --batch --yes --decrypt --passphrase="${CONNECTOR_AUTH_PASSPHRASE}" --output "${HOME}/target/test/connector_auth.toml" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
- name: Set connector auth file path in env
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
shell: bash
run: echo "CONNECTOR_AUTH_FILE_PATH=$HOME/target/test/connector_auth.toml" >> $GITHUB_ENV
run: echo "CONNECTOR_AUTH_FILE_PATH=${HOME}/target/test/connector_auth.toml" >> $GITHUB_ENV

- name: Set connector tests file path in env
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
shell: bash
run: echo "CONNECTOR_TESTS_FILE_PATH=$HOME/target/test/connector_tests.json" >> $GITHUB_ENV
run: echo "CONNECTOR_TESTS_FILE_PATH=${HOME}/target/test/connector_tests.json" >> $GITHUB_ENV

- name: Set ignore_browser_profile usage in env
if: ${{ (github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name) }}
@@ -154,9 +163,9 @@ jobs:
failed_connectors=()
for i in $(echo "$INPUT" | tr "," "\n"); do
echo $i
echo "${i}"
if ! cargo test --package test_utils --test connectors -- "${i}_ui::" --test-threads=1; then
failed_connectors+=("$i")
failed_connectors+=("${i}")
fi
done
30 changes: 20 additions & 10 deletions .github/workflows/postman-collection-runner.yml
@@ -52,27 +52,37 @@ jobs:
- name: Repository checkout
uses: actions/checkout@v4

- name: Decrypt connector auth file
- name: Download Encrypted TOML from S3 and Decrypt
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
env:
AWS_ACCESS_KEY_ID: ${{ secrets.CONNECTOR_CREDS_AWS_ACCESS_KEY_ID }}
AWS_REGION: ${{ secrets.CONNECTOR_CREDS_AWS_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.CONNECTOR_CREDS_AWS_SECRET_ACCESS_KEY }}
CONNECTOR_AUTH_PASSPHRASE: ${{ secrets.CONNECTOR_AUTH_PASSPHRASE }}
CONNECTOR_CREDS_S3_BUCKET_URI: ${{ secrets.CONNECTOR_CREDS_S3_BUCKET_URI}}
DESTINATION_FILE_NAME: "connector_auth.toml.gpg"
S3_SOURCE_FILE_NAME: "cf05a6ab-525e-4888-98b3-3b4a443b87c0.toml.gpg"
shell: bash
run: ./scripts/decrypt_connector_auth.sh
run: |
mkdir -p ${HOME}/target/secrets ${HOME}/target/test
aws s3 cp "${CONNECTOR_CREDS_S3_BUCKET_URI}/${S3_SOURCE_FILE_NAME}" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
gpg --quiet --batch --yes --decrypt --passphrase="${CONNECTOR_AUTH_PASSPHRASE}" --output "${HOME}/target/test/connector_auth.toml" "${HOME}/target/secrets/${DESTINATION_FILE_NAME}"
- name: Set paths in env
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
id: config_path
shell: bash
run: |
echo "CONNECTOR_AUTH_FILE_PATH=$HOME/target/test/connector_auth.toml" >> $GITHUB_ENV
echo "CONNECTOR_AUTH_FILE_PATH=${HOME}/target/test/connector_auth.toml" >> $GITHUB_ENV
- name: Fetch keys
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
env:
TOML_PATH: "./config/development.toml"
run: |
LOCAL_ADMIN_API_KEY=$(yq '.secrets.admin_api_key' $TOML_PATH)
echo "ADMIN_API_KEY=$LOCAL_ADMIN_API_KEY" >> $GITHUB_ENV
LOCAL_ADMIN_API_KEY=$(yq '.secrets.admin_api_key' ${TOML_PATH})
echo "ADMIN_API_KEY=${LOCAL_ADMIN_API_KEY}" >> $GITHUB_ENV
- name: Install Rust
if: ${{ ((github.event_name == 'pull_request') && (github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name)) || (github.event_name == 'merge_group')}}
@@ -118,7 +128,7 @@ jobs:
while ! nc -z localhost 8080; do
if [ $COUNT -gt 12 ]; then # Wait for up to 2 minutes (12 * 10 seconds)
echo "Server did not start within a reasonable time. Exiting."
kill $SERVER_PID
kill ${SERVER_PID}
exit 1
else
COUNT=$((COUNT+1))
@@ -141,10 +151,10 @@
export PATH=${NEWMAN_PATH}:${PATH}
failed_connectors=()
for i in $(echo "$CONNECTORS" | tr "," "\n"); do
echo $i
if ! cargo run --bin test_utils -- --connector-name="$i" --base-url="$BASE_URL" --admin-api-key="$ADMIN_API_KEY"; then
failed_connectors+=("$i")
for i in $(echo "${CONNECTORS}" | tr "," "\n"); do
echo "${i}"
if ! cargo run --bin test_utils -- --connector-name="${i}" --base-url="${BASE_URL}" --admin-api-key="${ADMIN_API_KEY}"; then
failed_connectors+=("${i}")
fi
done
41 changes: 41 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,47 @@ All notable changes to HyperSwitch will be documented here.

- - -

## 2024.01.18.0

### Features

- **connector_events:** Added api to fetch connector event logs ([#3319](https://github.com/juspay/hyperswitch/pull/3319)) ([`68a3a28`](https://github.com/juspay/hyperswitch/commit/68a3a280676c8309f9becffae545b134b5e1f2ea))
- **payment_method:** Add capability to store bank details using /payment_methods endpoint ([#3113](https://github.com/juspay/hyperswitch/pull/3113)) ([`01c2de2`](https://github.com/juspay/hyperswitch/commit/01c2de223f60595d77c06a59a40dfe041e02cfee))

### Bug Fixes

- **core:** Add validation for authtype and metadata in update payment connector ([#3305](https://github.com/juspay/hyperswitch/pull/3305)) ([`52f38d3`](https://github.com/juspay/hyperswitch/commit/52f38d3d5a7d035e8211e1f51c8f982232e2d7ab))
- **events:** Fix event generation for paymentmethods list ([#3337](https://github.com/juspay/hyperswitch/pull/3337)) ([`ac8d81b`](https://github.com/juspay/hyperswitch/commit/ac8d81b32b3d91b875113d32782a8c62e39ba2a8))

### Refactors

- **connector:** [cybersource] recurring mandate flow ([#3354](https://github.com/juspay/hyperswitch/pull/3354)) ([`387c1c4`](https://github.com/juspay/hyperswitch/commit/387c1c491bdc413ae361d04f0be25eaa58e72fa9))
- [Noon] adding new field max_amount to mandate request ([#3209](https://github.com/juspay/hyperswitch/pull/3209)) ([`eb2a61d`](https://github.com/juspay/hyperswitch/commit/eb2a61d8597995838f21b8233653c691118b2191))

### Miscellaneous Tasks

- **router:** Remove recon from default features ([#3370](https://github.com/juspay/hyperswitch/pull/3370)) ([`928beec`](https://github.com/juspay/hyperswitch/commit/928beecdd7fe9e09b38ffe750627ca4af94ffc93))

**Full Changelog:** [`2024.01.17.0...2024.01.18.0`](https://github.com/juspay/hyperswitch/compare/2024.01.17.0...2024.01.18.0)

- - -

## 2024.01.17.0

### Features

- **connector:** [BANKOFAMERICA] Implement 3DS flow for cards ([#3343](https://github.com/juspay/hyperswitch/pull/3343)) ([`d533c98`](https://github.com/juspay/hyperswitch/commit/d533c98b5107fb6876c11b183eb9bc382a77a2f1))
- **recon:** Add recon APIs ([#3345](https://github.com/juspay/hyperswitch/pull/3345)) ([`8678f8d`](https://github.com/juspay/hyperswitch/commit/8678f8d1448b5ce430931bfbbc269ef979d9eea7))

### Bug Fixes

- **connector_onboarding:** Check if connector exists for the merchant account and add reset tracking id API ([#3229](https://github.com/juspay/hyperswitch/pull/3229)) ([`58cc8d6`](https://github.com/juspay/hyperswitch/commit/58cc8d6109ce49d385b06c762ab3f6670f5094eb))
- **payment_link:** Added expires_on in payment response ([#3332](https://github.com/juspay/hyperswitch/pull/3332)) ([`5ad3f89`](https://github.com/juspay/hyperswitch/commit/5ad3f8939afafce3eec39704dcaa92270b384dcd))

**Full Changelog:** [`2024.01.12.1...2024.01.17.0`](https://github.com/juspay/hyperswitch/compare/2024.01.12.1...2024.01.17.0)

- - -

## 2024.01.12.1

### Miscellaneous Tasks
1 change: 1 addition & 0 deletions config/config.example.toml
@@ -351,6 +351,7 @@ stripe = { payment_method = "bank_transfer" }
nuvei = { payment_method = "card" }
shift4 = { payment_method = "card" }
bluesnap = { payment_method = "card" }
bankofamerica = {payment_method = "card"}
cybersource = {payment_method = "card"}
nmi = {payment_method = "card"}

1 change: 1 addition & 0 deletions config/development.toml
@@ -428,6 +428,7 @@ stripe = {payment_method = "bank_transfer"}
nuvei = {payment_method = "card"}
shift4 = {payment_method = "card"}
bluesnap = {payment_method = "card"}
bankofamerica = {payment_method = "card"}
cybersource = {payment_method = "card"}
nmi = {payment_method = "card"}

1 change: 1 addition & 0 deletions config/docker_compose.toml
@@ -241,6 +241,7 @@ stripe = {payment_method = "bank_transfer"}
nuvei = {payment_method = "card"}
shift4 = {payment_method = "card"}
bluesnap = {payment_method = "card"}
bankofamerica = {payment_method = "card"}
cybersource = {payment_method = "card"}
nmi = {payment_method = "card"}

16 changes: 16 additions & 0 deletions crates/analytics/src/clickhouse.rs
@@ -21,6 +21,7 @@ use crate::{
filters::ApiEventFilter,
metrics::{latency::LatencyAvg, ApiEventMetricRow},
},
connector_events::events::ConnectorEventsResult,
outgoing_webhook_event::events::OutgoingWebhookLogsResult,
sdk_events::events::SdkEventsResult,
types::TableEngine,
@@ -121,6 +122,7 @@ impl AnalyticsDataSource for ClickhouseClient {
}
AnalyticsCollection::SdkEvents => TableEngine::BasicTree,
AnalyticsCollection::ApiEvents => TableEngine::BasicTree,
AnalyticsCollection::ConnectorEvents => TableEngine::BasicTree,
AnalyticsCollection::OutgoingWebhookEvent => TableEngine::BasicTree,
}
}
@@ -147,6 +149,7 @@ impl super::sdk_events::events::SdkEventsFilterAnalytics for ClickhouseClient {}
impl super::api_event::events::ApiLogsFilterAnalytics for ClickhouseClient {}
impl super::api_event::filters::ApiEventFilterAnalytics for ClickhouseClient {}
impl super::api_event::metrics::ApiEventMetricAnalytics for ClickhouseClient {}
impl super::connector_events::events::ConnectorEventLogAnalytics for ClickhouseClient {}
impl super::outgoing_webhook_event::events::OutgoingWebhookLogsFilterAnalytics
for ClickhouseClient
{
@@ -188,6 +191,18 @@ impl TryInto<SdkEventsResult> for serde_json::Value {
}
}

impl TryInto<ConnectorEventsResult> for serde_json::Value {
type Error = Report<ParsingError>;

fn try_into(self) -> Result<ConnectorEventsResult, Self::Error> {
serde_json::from_value(self)
.into_report()
.change_context(ParsingError::StructParseFailure(
"Failed to parse ConnectorEventsResult in clickhouse results",
))
}
}

impl TryInto<PaymentMetricRow> for serde_json::Value {
type Error = Report<ParsingError>;

@@ -344,6 +359,7 @@ impl ToSql<ClickhouseClient> for AnalyticsCollection {
Self::SdkEvents => Ok("sdk_events_dist".to_string()),
Self::ApiEvents => Ok("api_audit_log".to_string()),
Self::PaymentIntent => Ok("payment_intents_dist".to_string()),
Self::ConnectorEvents => Ok("connector_events_audit".to_string()),
Self::OutgoingWebhookEvent => Ok("outgoing_webhook_events_audit".to_string()),
}
}
5 changes: 5 additions & 0 deletions crates/analytics/src/connector_events.rs
@@ -0,0 +1,5 @@
mod core;
pub mod events;
pub trait ConnectorEventAnalytics: events::ConnectorEventLogAnalytics {}

pub use self::core::connector_events_core;
27 changes: 27 additions & 0 deletions crates/analytics/src/connector_events/core.rs
@@ -0,0 +1,27 @@
use api_models::analytics::connector_events::ConnectorEventsRequest;
use common_utils::errors::ReportSwitchExt;
use error_stack::{IntoReport, ResultExt};

use super::events::{get_connector_events, ConnectorEventsResult};
use crate::{errors::AnalyticsResult, types::FiltersError, AnalyticsProvider};

pub async fn connector_events_core(
pool: &AnalyticsProvider,
req: ConnectorEventsRequest,
merchant_id: String,
) -> AnalyticsResult<Vec<ConnectorEventsResult>> {
let data = match pool {
AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented(
"Connector Events not implemented for SQLX",
))
.into_report()
.attach_printable("SQL Analytics is not implemented for Connector Events"),
AnalyticsProvider::Clickhouse(ckh_pool)
| AnalyticsProvider::CombinedSqlx(_, ckh_pool)
| AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
get_connector_events(&merchant_id, req, ckh_pool).await
}
}
.switch()?;
Ok(data)
}
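Since the match above only has a ClickHouse-backed path, a caller just builds the request and hands it to `connector_events_core` together with the provider pool and merchant id. The following is a minimal, hypothetical caller sketch, not part of this commit: the handler name is invented, the crate is assumed to be imported as `analytics`, and the shape of `ConnectorEventsRequest` (a public `query_param` field holding `QueryType::Payment { payment_id }`) is inferred from its usage in `events.rs` below rather than from its definition in `api_models`.

// Hypothetical caller sketch; names and request shape assumed as noted above.
use analytics::{connector_events::connector_events_core, AnalyticsProvider};
use api_models::analytics::connector_events::{ConnectorEventsRequest, QueryType};

async fn fetch_connector_logs(pool: &AnalyticsProvider, merchant_id: String, payment_id: String) {
    // Query all connector events recorded for a single payment.
    let req = ConnectorEventsRequest {
        query_param: QueryType::Payment { payment_id },
    };
    match connector_events_core(pool, req, merchant_id).await {
        // Each row describes one connector call: request, response, latency, status code.
        Ok(events) => println!("fetched {} connector event rows", events.len()),
        // SQLx-only deployments land here, since the query is not implemented for SQLx.
        Err(err) => eprintln!("connector events query failed: {err:?}"),
    }
}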
63 changes: 63 additions & 0 deletions crates/analytics/src/connector_events/events.rs
@@ -0,0 +1,63 @@
use api_models::analytics::{
connector_events::{ConnectorEventsRequest, QueryType},
Granularity,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;

use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};
pub trait ConnectorEventLogAnalytics: LoadRow<ConnectorEventsResult> {}

pub async fn get_connector_events<T>(
merchant_id: &String,
query_param: ConnectorEventsRequest,
pool: &T,
) -> FiltersResult<Vec<ConnectorEventsResult>>
where
T: AnalyticsDataSource + ConnectorEventLogAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::ConnectorEvents);
query_builder.add_select_column("*").switch()?;

query_builder
.add_filter_clause("merchant_id", merchant_id)
.switch()?;
match query_param.query_param {
QueryType::Payment { payment_id } => query_builder
.add_filter_clause("payment_id", payment_id)
.switch()?,
}
//TODO!: update the execute_query function to return reports instead of plain errors...
query_builder
.execute_query::<ConnectorEventsResult, _>(pool)
.await
.change_context(FiltersError::QueryBuildingError)?
.change_context(FiltersError::QueryExecutionFailure)
}

#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct ConnectorEventsResult {
pub merchant_id: String,
pub payment_id: String,
pub connector_name: Option<String>,
pub request_id: Option<String>,
pub flow: String,
pub request: String,
pub response: Option<String>,
pub error: Option<String>,
pub status_code: u16,
pub latency: Option<u128>,
#[serde(with = "common_utils::custom_serde::iso8601")]
pub created_at: PrimitiveDateTime,
pub method: Option<String>,
}
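For reference, each decoded row from the `connector_events_audit` table becomes one `ConnectorEventsResult` value. The sketch below constructs such a value by hand purely to illustrate the field types; every value (connector name, flow name, payloads, latency figure and its unit) is invented, and it assumes the `time` crate's `macros` feature is enabled for the `datetime!` literal.

// Illustrative only: hand-built sample row, values made up.
use time::macros::datetime;

fn sample_connector_event() -> ConnectorEventsResult {
    ConnectorEventsResult {
        merchant_id: "merchant_1234".to_string(),
        payment_id: "pay_abc123".to_string(),
        connector_name: Some("stripe".to_string()),
        request_id: Some("req_0001".to_string()),
        flow: "PaymentsAuthorize".to_string(), // illustrative flow name
        request: r#"{"amount":100,"currency":"USD"}"#.to_string(),
        response: Some(r#"{"status":"succeeded"}"#.to_string()),
        error: None,
        status_code: 200,
        latency: Some(230), // unit not specified in this commit
        created_at: datetime!(2024-01-18 10:00:00),
        method: Some("POST".to_string()),
    }
}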
1 change: 1 addition & 0 deletions crates/analytics/src/lib.rs
@@ -7,6 +7,7 @@ mod query;
pub mod refunds;

pub mod api_event;
pub mod connector_events;
pub mod outgoing_webhook_event;
pub mod sdk_events;
mod sqlx;
2 changes: 2 additions & 0 deletions crates/analytics/src/sqlx.rs
@@ -429,6 +429,8 @@ impl ToSql<SqlxClient> for AnalyticsCollection {
Self::ApiEvents => Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("ApiEvents table is not implemented for Sqlx"))?,
Self::PaymentIntent => Ok("payment_intent".to_string()),
Self::ConnectorEvents => Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("ConnectorEvents table is not implemented for Sqlx"))?,
Self::OutgoingWebhookEvent => Err(error_stack::report!(ParsingError::UnknownError)
.attach_printable("OutgoingWebhookEvents table is not implemented for Sqlx"))?,
}
1 change: 1 addition & 0 deletions crates/analytics/src/types.rs
@@ -26,6 +26,7 @@ pub enum AnalyticsCollection {
SdkEvents,
ApiEvents,
PaymentIntent,
ConnectorEvents,
OutgoingWebhookEvent,
}

2 changes: 1 addition & 1 deletion crates/api_models/Cargo.toml
@@ -8,7 +8,7 @@ readme = "README.md"
license.workspace = true

[features]
default = ["payouts", "frm", "recon"]
default = ["payouts", "frm"]
business_profile_routing = []
connector_choice_bcompat = []
errors = ["dep:actix-web", "dep:reqwest"]
1 change: 1 addition & 0 deletions crates/api_models/src/analytics.rs
@@ -12,6 +12,7 @@ use self::{
pub use crate::payments::TimeRange;

pub mod api_event;
pub mod connector_events;
pub mod outgoing_webhook_event;
pub mod payments;
pub mod refunds;