69 | 69 | applepay_endpoint = "DOMAIN SPECIFIC ENDPOINT" | |
70 | 70 | ||
71 | 71 | [locker] | |
72 | host = "http://127.0.0.1:3000" | ||
73 | host_rs = "http://127.0.0.1:3000" | ||
72 | host = "http://127.0.0.1:3001" |
Why are we changing these ports?
Because these are the ones being used in card-vault.
Can we change the port in card-vault instead? I don't know all the places where this port would need to be changed (documentation and elsewhere).
160 | } | ||
161 | |||
162 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] | ||
163 | fn mask_sensitive_data(value: serde_json::Value) -> serde_json::Value { |
Why are we using masking like this? There is `Secret`, which does the same thing, right?
The function is needed for masking all the keys present, but yes, the masked value could come from the `Secret` type. The use case is when I just want the key structure, so that I can create the proxy payment request in that format.
We can introduce a masking strategy that does the same thing and use the masking crate itself, so that there is only one approach for masking.
84 | 84 | redis_ttl_in_seconds = 172800 | |
85 | 85 | ||
86 | 86 | [jwekey] | |
87 | vault_encryption_key = """ | ||
88 | -----BEGIN PUBLIC KEY----- | ||
89 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwa6siKaSYqD1o4J3AbHq | ||
90 | Km8oVTvep7GoN/C45qY60C7DO72H1O7Ujt6ZsSiK83EyI0CaUg3ORPS3ayobFNmu | ||
91 | zR366ckK8GIf3BG7sVI6u/9751z4OvBHZMM9JFWa7Bx/RCPQ8aeM+iJoqf9auuQm | ||
92 | 3NCTlfaZJif45pShswR+xuZTR/bqnsOSP/MFROI9ch0NE7KRogy0tvrZe21lP24i | ||
93 | Ro2LJJG+bYshxBddhxQf2ryJ85+/Trxdu16PunodGzCl6EMT3bvb4ZC41i15omqU | ||
94 | aXXV1Z1wYUhlsO0jyd1bVvjyuE/KE1TbBS0gfR/RkacODmmE2zEdZ0EyyiXwqkmc | ||
95 | oQIDAQAB | ||
96 | -----END PUBLIC KEY----- | ||
97 | """ | ||
98 | rust_locker_encryption_key = """ | ||
99 | -----BEGIN PUBLIC KEY----- | ||
100 | MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwa6siKaSYqD1o4J3AbHq | ||
101 | Km8oVTvep7GoN/C45qY60C7DO72H1O7Ujt6ZsSiK83EyI0CaUg3ORPS3ayobFNmu | ||
102 | zR366ckK8GIf3BG7sVI6u/9751z4OvBHZMM9JFWa7Bx/RCPQ8aeM+iJoqf9auuQm | ||
103 | 3NCTlfaZJif45pShswR+xuZTR/bqnsOSP/MFROI9ch0NE7KRogy0tvrZe21lP24i | ||
104 | Ro2LJJG+bYshxBddhxQf2ryJ85+/Trxdu16PunodGzCl6EMT3bvb4ZC41i15omqU | ||
105 | aXXV1Z1wYUhlsO0jyd1bVvjyuE/KE1TbBS0gfR/RkacODmmE2zEdZ0EyyiXwqkmc | ||
106 | oQIDAQAB | ||
107 | -----END PUBLIC KEY----- | ||
108 | """ | ||
87 | vault_encryption_key = "" | ||
88 | rust_locker_encryption_key = "" |
Why did we remove these keys? Is this intentional?
Yes
510 | fn unique_constraints(&self) -> Vec<String> { | ||
511 | vec![ | ||
512 | format!("id_{}", self.id.get_string_repr()), | ||
513 | format!("locker_id_{}", self.locker_id), |
The global ID itself will be a unique key, right? Is the composite unique key required here?
I hope it's fine now?
1 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] |
Move this to the storage_impl crate instead of keeping it in router; you can refer to other tables in the storage_impl crate for reference.
2850 | |||
2851 | /// The json to be used for tokeniation | ||
2852 | #[schema(value_type = Option<serde_json::Value>)] | ||
2853 | pub tokenization_data: Option<masking::Secret<serde_json::Value>>, |
pub tokenization_data: Option<masking::Secret<serde_json::Value>>, | |
pub tokenization_data: Option<pii::SecretSerdeValue>, |
2870 | |||
2871 | /// The json to be used for tokeniation | ||
2872 | #[schema(value_type = Option<serde_json::Value>)] | ||
2873 | pub tokenization_data: Option<masking::Secret<serde_json::Value>>, |
Please use `pii::SecretSerdeValue` here, and in other places as well.
13 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] | ||
14 | /// Response structure for tokenization operations | ||
15 | #[derive(Debug, Serialize, Deserialize)] | ||
16 | pub struct TokenizationResponse { | ||
17 | /// Unique identifier for the tokenized data | ||
18 | pub id: GlobalTokenId, | ||
19 | /// Reference to the vault/locker where the actual data is stored | ||
20 | pub locker_id: String, | ||
21 | /// Timestamp when the token was created | ||
22 | pub created_at: PrimitiveDateTime, | ||
23 | /// Timestamp when the token was last updated | ||
24 | pub updated_at: PrimitiveDateTime, | ||
25 | /// Current status of the token | ||
26 | pub flag: TokenizationFlag, |
Why do we need this tokenization.rs in the common_utils crate?
Tokenization types are already present in the api, domain, and diesel layers — why do we need them here as well?
40 | /// Enum representing the status of a tokenized payment method | ||
41 | #[derive(Debug, Clone, Serialize, Deserialize, strum::Display, strum::EnumString)] | ||
42 | #[strum(serialize_all = "snake_case")] | ||
43 | pub enum TokenizationFlag { | ||
44 | /// Token is active and can be used for payments | ||
45 | Enabled, | ||
46 | /// Token is inactive and cannot be used for payments | ||
47 | Disabled, |
Why do we need this here? We have already added it in common_enums, right?
67 | } | ||
68 | |||
69 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] | ||
70 | impl Tokenization { | ||
71 | pub async fn insert(self, conn: &PgPooledConn) -> StorageResult<Self> { | ||
72 | generics::generic_insert(conn, self).await | ||
73 | } | ||
74 | } |
This too.
Because this is present in storage_impl.
Is that the reason?
108 | |||
109 | #[instrument(skip_all)] | ||
110 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] | ||
111 | pub async fn get_token_vault_core( |
nit: this name could be better.
P.S.: I'm not able to infer the functionality from the name.
We can take this up later when we use this for a handler function.
18 | use error_stack::ResultExt; | ||
19 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] | ||
20 | use hyperswitch_domain_models; | ||
21 | #[cfg(all(feature = "v2", feature = "tokenization_v2"))] |
nit: instead of repeating this attribute on every import and method, it can be moved to the parent level — e.g. core.rs in the same crate.
52 | |state, auth: auth::AuthenticationData, request, _| async move { | ||
53 | tokenization::create_vault_token_core( | ||
54 | state, | ||
55 | &auth.merchant_account, |
MerchantContext has been introduced recently; we should use that, I guess.
CC: @maverox
Log in to write a comment.
Type of Change
Description
Create a new set of endpoints providing tokenization as a service for generic usage; the current motivation for the service is its use in the proxy payment service.
Additional Changes
Motivation and Context
How did you test it?
Create a token
Response
Checklist
cargo +nightly fmt --all
cargo clippy