import re
# Compiled pattern: matches a single vertical-tab character (\x0b),
# captured as group 1.
pattern = r"(\v)"
regex = re.compile(pattern)
# Sample text to search. Built from implicitly concatenated string literals;
# the payload happens to be JavaScript source pasted in as plain data.
# NOTE(review): it contains no vertical-tab (\x0b) characters, so searching it
# with a \v pattern yields no match — presumably intentional for this
# auto-generated snippet; confirm if a match was expected.
test_str = ("line one⋮\n"
"line twoimport { CustomCacheKey } from '@layer0/core/router'\n\n"
"router.get('/some/path', ({ cache }) => {\n\n"
" cache({\n\n"
" key: new CustomCacheKey().addCookie('language').addCookie('currency'),\n\n"
" // Other options...\n\n"
" })\n\n"
"})import { CustomCacheKey } from '@layer0/core/router'\n\n"
"router.get('/some/path', ({ cache }) => {\n\n"
" cache({\n\n"
" browser: {\n\n"
" // Sets the cache-control: maxage=n header sent to the browser. To prevent the browser from caching this route\n\n"
" // set maxAgeSeconds: 0\n\n"
" maxAgeSeconds: 0,\n\n"
" // Sends a non-standard header `x-sw-cache-control: n` that you can use to control caching your service worker.\n\n"
" // Note that service workers do not understand this header by default, so you would need to add code to your service\n\n"
" // worker to support it\n\n"
" serviceWorkerSeconds: 60 * 60,\n\n"
" },\n\n"
" edge: {\n\n"
" // Sets the TTL for a response in Layer0's edge cache\n\n"
" maxAgeSeconds: 60 * 60 * 24,\n\n"
" // Sets the amount of time a stale response will be served from the cache. When a stale response is sent, Layer0\n\n"
" // will simultaneously fetch a new response to serve subsequent requests.\n\n"
" // Using stale-while-revalidate helps raise your effective cache hit rate to near 100%.\n\n"
" staleWhileRevalidateSeconds: 60 * 60, // serve stale responses for up to 1 hour while fetching a new response\n\n"
" // And many other options\n\n"
" },\n\n"
" // Optionally customizes the cache key for both edge and browser\n\n"
" key: new CustomCacheKey()\n\n"
" .addBrowser() // Split cache by browser type\n\n"
" .addCookie('some-cookie'), // Split cache by some-cookie cookie\n\n"
" })\n\n"
"})router.get('/scripts/:file', ({ serveStatic }) => {\n\n"
" serveStatic('path/to/scripts', {\n\n"
" permanent: true, // ensure that files are permanently accessible, even after a new version of the site has been deployed.\n\n"
" exclude: ['some-non-versioned-file.js'], // you can exclude specific files from being served permanently. You should do this for any files that do not have a hash of the content in the name.\n\n"
" })\n\n"
"})")
# Search the sample text and report results.
# Fix: the original lines under `if match:` and the `for` loop were flush-left
# (indentation lost in extraction), which raises IndentationError; proper
# block indentation is restored here. Output strings are unchanged.
match = regex.search(test_str)
if match:
    print(f"Match was found at {match.start()}-{match.end()}: {match.group()}")
    # Report each capture group's span and text (group numbers are 1-based).
    for group_num, group in enumerate(match.groups(), start=1):
        print(f"Group {group_num} found at {match.start(group_num)}-{match.end(group_num)}: {group}")
# Please keep in mind that these code samples are automatically generated and are not guaranteed to work. If you find any syntax errors, feel free to submit a bug report. For a full regex reference for Python, please visit: https://docs.python.org/3/library/re.html