- Notifications
You must be signed in to change notification settings - Fork 1.1k
feat: set default workspace proxy based on latency #17812
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.
Already on GitHub? Sign in to your account
Uh oh!
There was an error while loading. Please reload this page.
Changes from all commits
ab3f897fd172b75de9d61e0b9eb35427b61dcf98d793bc6b3f5c0719f2b5d9cb3102b7d07e472abf49ae67558993383a80fdb545cf5b76e4a5284118cc71ea9d9e7f3603699601aebedFile filter
Filter by extension
Conversations
Uh oh!
There was an error while loading. Please reload this page.
Jump to
Uh oh!
There was an error while loading. Please reload this page.
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -48,6 +48,11 @@ export const useProxyLatency = ( | ||
| // Until the new values are loaded, the old values will still be used. | ||
| refetch: () => Date; | ||
| proxyLatencies: Record<string, ProxyLatencyReport>; | ||
| // loaded signals all latency requests have completed. Once set to true, this will not change. | ||
| // Latencies at this point should be loaded from local storage, and updated asynchronously as needed. | ||
| // If local storage has updated latencies, then this will be set to true with 0 actual network requests. | ||
| // The loaded latencies will all be from the cache. | ||
| loaded: boolean; | ||
| } => { | ||
| // maxStoredLatencies is the maximum number of latencies to store per proxy in local storage. | ||
| let maxStoredLatencies = 1; | ||
| @@ -73,6 +78,8 @@ export const useProxyLatency = ( | ||
| new Date(new Date().getTime() - proxyIntervalSeconds * 1000).toISOString(), | ||
| ); | ||
| const [loaded, setLoaded] = useState(false); | ||
Member (Author): There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This is how I track when the latencies are done loading, since they all come back async | ||
| // Refetch will always set the latestFetchRequest to the current time, making all the cached latencies | ||
| // stale and triggering a refetch of all proxies in the list. | ||
| const refetch = () => { | ||
| @@ -231,6 +238,7 @@ export const useProxyLatency = ( | ||
| // Local storage cleanup | ||
| garbageCollectStoredLatencies(proxies, maxStoredLatencies); | ||
| setLoaded(true); | ||
| }); | ||
| return () => { | ||
| @@ -241,6 +249,7 @@ export const useProxyLatency = ( | ||
| return { | ||
| proxyLatencies, | ||
| refetch, | ||
| loaded, | ||
| }; | ||
| }; | ||
Uh oh!
There was an error while loading. Please reload this page.