feat: set default workspace proxy based on latency #17812
@@ -48,6 +48,11 @@ export const useProxyLatency = (
   // Until the new values are loaded, the old values will still be used.
   refetch: () => Date;
   proxyLatencies: Record<string, ProxyLatencyReport>;
+  // loaded signals all latency requests have completed. Once set to true, this will not change.
+  // Latencies at this point should be loaded from local storage, and updated asynchronously as needed.
+  // If local storage has updated latencies, then this will be set to true with 0 actual network requests.
+  // The loaded latencies will all be from the cache.
+  loaded: boolean;
 } => {
   // maxStoredLatencies is the maximum number of latencies to store per proxy in local storage.
   let maxStoredLatencies = 1;
@@ -73,6 +78,8 @@ export const useProxyLatency = (
     new Date(new Date().getTime() - proxyIntervalSeconds * 1000).toISOString(),
   );
+  const [loaded, setLoaded] = useState(false);
+
   // Refetch will always set the latestFetchRequest to the current time, making all the cached latencies
   // stale and triggering a refetch of all proxies in the list.
   const refetch = () => {

Review comment on the added const [loaded, setLoaded] = useState(false); line: This is how I track when the latencies are done loading, since they all come back async.
@@ -231,6 +238,7 @@ export const useProxyLatency = (
       // Local storage cleanup
       garbageCollectStoredLatencies(proxies, maxStoredLatencies);
+      setLoaded(true);
     });

     return () => {
@@ -241,6 +249,7 @@ export const useProxyLatency = (
   return {
     proxyLatencies,
     refetch,
+    loaded,
   };
 };
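
The loaded flag exists so a consumer can tell when latency collection has finished, whether the numbers came from real probes or entirely from the local storage cache (in which case zero network requests are made), and only then act on the results. The sketch below shows one way the behaviour in the PR title, choosing a default workspace proxy by latency, could be built on top of this hook. It is not code from this PR: the hook's exact parameter type, the latencyMS field on ProxyLatencyReport, and the setDefaultProxy callback are assumptions that should be checked against the actual module.

// A minimal consumer sketch, not code from this PR.
// Assumes useProxyLatency and ProxyLatencyReport are imported from the module
// this diff edits (path not shown here), that the hook accepts the proxy list
// as an argument, that ProxyLatencyReport exposes a numeric latencyMS field,
// and that the caller supplies a setDefaultProxy callback.
import { useEffect } from "react";
// import { useProxyLatency } from "<path to the hook edited in this diff>";

export const useAutoSelectProxy = (
  proxies: readonly { id: string }[],
  setDefaultProxy: (proxyId: string) => void,
) => {
  const { proxyLatencies, loaded } = useProxyLatency(proxies);

  useEffect(() => {
    // loaded is the signal this PR adds: only once every latency request has
    // completed (possibly served entirely from the local storage cache) do we
    // pick a default.
    if (!loaded) {
      return;
    }
    const entries = Object.entries(proxyLatencies);
    if (entries.length === 0) {
      return;
    }
    // Choose the proxy with the lowest reported latency.
    const [bestProxyId] = entries.reduce((best, current) =>
      current[1].latencyMS < best[1].latencyMS ? current : best,
    );
    setDefaultProxy(bestProxyId);
  }, [loaded, proxyLatencies, setDefaultProxy]);
};

Gating on loaded rather than on the presence of entries in proxyLatencies matters because, per the comment in the diff, cached results can arrive with no network requests at all, so the consumer cannot simply count responses to know the set is complete.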