Go to File > Preferences > Settings
Search: Import Module Specifier Ending
Set the JavaScript and TypeScript "Import Module Specifier Ending" preferences to js so that generated imports end with .js / .ts
Data Types: there are 4 types of integral literal.
1. Decimal: symbols 0 to 9. Rule: it must not start with 0. e.g. 123, 45, -6, -7.
2. Octal: symbols 0 to 7. Rule: it must start with 0.
3. Hexadecimal: symbols 0 to 9 and A/a to F/f. Rule: it must start with 0x / 0X.
4. Binary: symbols 0 and 1. Rule: it must start with 0b / 0B.
user28102875,
There is an issue with the MultiIndex in the DataFrame; this is how we can fix it:
# Drop columns that are not needed
if isinstance(df.columns, pd.MultiIndex) and df.columns.nlevels > 1:
    df.columns = df.columns.droplevel(1)
Also StochasticOscillator requires High, Low, Close
stoch_rsi = StochasticOscillator(df['High'], df['Low'], df['Close'], window=14, smooth_window=3)
Here is your full fixed code (maybe you need other changes for the logic, but it works):
import yfinance as yf
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from ta.trend import EMAIndicator
from ta.momentum import RSIIndicator, StochasticOscillator
from ta.volatility import AverageTrueRange
# Data Retrieval
def get_data(ticker, start, end):
    df = yf.download(ticker, start=start, end=end)
    df.reset_index(inplace=True)  # Reset the index to make it easier to work with
    df['Date'] = df['Date'].dt.date  # Convert to date type if needed
    print("Initial DataFrame head:\n", df.head())  # Print the first few rows
    print("Initial DataFrame shape:", df.shape)  # Print the shape of the DataFrame
    # Drop columns that are not needed
    if isinstance(df.columns, pd.MultiIndex) and df.columns.nlevels > 1:
        df.columns = df.columns.droplevel(1)
    return df
# Indicator Calculation
def calculate_indicators(df):
    # Exponential Moving Averages (EMAs)
    df['EMA8'] = EMAIndicator(df['Close'], window=8).ema_indicator()
    df['EMA14'] = EMAIndicator(df['Close'], window=14).ema_indicator()
    df['EMA50'] = EMAIndicator(df['Close'], window=50).ema_indicator()
    # Print shapes to debug
    print("EMA8 shape:", df['EMA8'].shape)
    print("EMA14 shape:", df['EMA14'].shape)
    print("EMA50 shape:", df['EMA50'].shape)
    # Relative Strength Index (RSI)
    df['RSI14'] = RSIIndicator(df['Close'], window=14).rsi()
    print("RSI14 shape:", df['RSI14'].shape)
    # Stochastic RSI
    stoch_rsi = StochasticOscillator(df['High'], df['Low'], df['Close'], window=14, smooth_window=3)
    df['StochRSI_K'] = stoch_rsi.stoch()
    df['StochRSI_D'] = stoch_rsi.stoch_signal()
    # Print shapes
    print("StochRSI_K shape:", df['StochRSI_K'].shape)
    print("StochRSI_D shape:", df['StochRSI_D'].shape)
    # Average True Range (ATR)
    atr = AverageTrueRange(df['High'], df['Low'], df['Close'], window=14)
    df['ATR14'] = atr.average_true_range()
    print("ATR14 shape:", df['ATR14'].shape)
    # Drop rows with NaN values
    df.dropna(inplace=True)
    return df
# Entry Conditions
def signal_generator(df):
    df['Long'] = (
        (df['Close'] > df['EMA8']) &
        (df['EMA8'] > df['EMA14']) &
        (df['EMA14'] > df['EMA50']) &
        (df['StochRSI_K'] > df['StochRSI_D'])
    )
    df['Short'] = (
        (df['Close'] < df['EMA8']) &
        (df['EMA8'] < df['EMA14']) &
        (df['EMA14'] < df['EMA50']) &
        (df['StochRSI_K'] < df['StochRSI_D'])
    )
    return df
# Position Management
def backtest_strategy(df):
    position = None
    entry_price = 0
    results = []
    for index, row in df.iterrows():
        if position is None:  # No open position
            if row['Long']:
                position = 'long'
                entry_price = row['Close']
                stop_loss = entry_price - (3 * row['ATR14'])
                take_profit = entry_price + (2 * row['ATR14'])
            elif row['Short']:
                position = 'short'
                entry_price = row['Close']
                stop_loss = entry_price + (3 * row['ATR14'])
                take_profit = entry_price - (2 * row['ATR14'])
        elif position == 'long':
            if row['Close'] >= take_profit or row['Close'] <= stop_loss:
                results.append(row['Close'] - entry_price)  # Profit or loss
                position = None  # Close position
        elif position == 'short':
            if row['Close'] <= take_profit or row['Close'] >= stop_loss:
                results.append(entry_price - row['Close'])  # Profit or loss
                position = None  # Close position
    return results
# Performance Metrics
def calculate_performance(results, df):
    df['Daily_Returns'] = df['Close'].pct_change()
    df['Strategy_Returns'] = pd.Series(results).shift(1).fillna(0)  # Align with dates
    df['Cumulative_Strategy'] = (1 + df['Strategy_Returns']).cumprod()
    df['Cumulative_Buy_Hold'] = (1 + df['Daily_Returns']).cumprod()
    return df
# Visualization
def plot_performance(df):
    plt.figure(figsize=(12, 6))
    plt.plot(df['Cumulative_Strategy'], label='Strategy Returns', color='blue')
    plt.plot(df['Cumulative_Buy_Hold'], label='Buy and Hold Returns', color='orange')
    plt.title('Cumulative Returns: Strategy vs. Buy and Hold')
    plt.xlabel('Date')
    plt.ylabel('Cumulative Returns')
    plt.legend()
    plt.grid()
    plt.show()
# Main Execution
ticker = 'IWV'
start_date = '2020-01-01'
end_date = '2024-01-01'
data = get_data(ticker, start_date, end_date)
# Check the DataFrame contents before calculating indicators
print("Data after retrieval:\n", data.head())
print("Data shape after retrieval:", data.shape)
# Now attempt to calculate indicators
data = calculate_indicators(data)
data = signal_generator(data)
results = backtest_strategy(data)
data = calculate_performance(results, data)
plot_performance(data)
For example, in the controller, emit the event in your POST method:
...
import { Observable, fromEvent, map } from 'rxjs';
import { EventEmitter2, OnEvent } from '@nestjs/event-emitter';

@Controller('users')
export class UsersController {
  constructor(
    private readonly usersService: UsersService,
    private eventEmitter: EventEmitter2,
  ) { }

  @Post()
  @ApiOkResponse({ type: UserEntity })
  async create(@Body() createUserDto: CreateUserDto) {
    const user = await this.usersService.create(createUserDto);
    this.eventEmitter.emit('user-create', user);
    return user;
  }

  @Sse('create-user')
  sse(): Observable<MessageEvent> {
    try {
      return fromEvent(this.eventEmitter, 'user-create').pipe(
        map((data) => {
          return new MessageEvent('user-create', { data: data } as MessageEvent);
        }),
      );
    } catch (e) {
      console.log('error', e);
    }
  }
}
If you are still using Next.js version 12 or below, you can do this:
/* /styles/global.css */
@font-face {
    font-family: 'Rubik';
    src: url('../fonts/rubik/Rubik-Regular.ttf');
    font-weight: normal;
    font-style: normal;
}

@font-face {
    font-family: 'Rubik';
    src: url('../fonts/rubik/Rubik-Bold.ttf');
    font-weight: bold;
    font-style: normal;
}

@font-face {
    font-family: 'Rubik';
    src: url('../fonts/rubik/Rubik-Italic.ttf');
    font-weight: normal;
    font-style: italic;
}
If you are using a newer version like 13 and above, you can do this from this Link
In the first step, you need to define your local font(s), and then you need to pass it like a CSS module class.
import localFont from 'next/font/local'

// Font files can be colocated inside of `pages`
const myFont = localFont({ src: './my-font.woff2' })

export default function MyApp({ Component, pageProps }) {
  return (
    <main className={myFont.className}>
      <Component {...pageProps} />
    </main>
  )
}
What is the currently configured timeout on the client? The Braintree gateway has an official timeout of 60 secs, so technically the subscription creation can take up to that amount of time.
Unfortunately, if you have a custom timeout set of less than 60 secs then they cannot guarantee a response back within that time. You'll likely need to implement some retry logic to have the client retry to check the state of the subscription creation after the timeout.
See best practice for timeouts here: https://developer.paypal.com/braintree/docs/reference/general/best-practices/php#timeouts
Facing the same problem when creating a mesh model from a point cloud.
Generally, the performance difference between an application running inside a Docker container and one running natively is minimal, especially for most use cases. Docker containers are designed to be lightweight and share the host's operating system kernel, which makes them faster than traditional virtual machines that require their own OS layer. However, a few factors can affect performance in certain scenarios:
I/O and File System Access: Disk I/O can be slower in Docker containers, particularly with certain storage drivers or if you are using bind mounts to access host file systems. This can cause a noticeable performance difference for applications that perform a lot of file reads/writes.
Networking Overhead: Docker adds some networking overhead, as it often uses virtualized networking. This may result in slower network operations compared to running natively, though it's usually a small difference.
Resource Constraints: Containers share the host's resources, and misconfigured limits on CPU, memory, or I/O can create bottlenecks. Properly setting resource limits or using dedicated resources can mitigate this.
Storage Driver Choices: Different storage drivers (like overlay2, aufs, or btrfs) have varying performance profiles. Choosing the right storage driver can make a big difference, especially for applications with heavy file I/O.
Compatibility with Host System: On some systems, like macOS, Docker requires a virtual machine to run, which does introduce a performance hit. However, on Linux, Docker runs natively, so the impact is typically negligible.
Overall, for most applications, the performance difference is minor. High-performance applications or those with specific needs might need fine-tuning or native execution, but Docker is generally suitable for most workloads.
If the error comes with an IN query, don't execute the query if the list or array is empty. Just check whether the array is empty and skip executing the query.
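As a rough illustration (a minimal sketch assuming Python with a DB-API style cursor and %s placeholders; the fetch_orders function and the orders table are made up for the example):

def fetch_orders(cursor, status_list):
    # Guard: an empty list would produce "IN ()", which is invalid SQL on most engines
    if not status_list:
        return []  # skip executing the query entirely
    placeholders = ", ".join(["%s"] * len(status_list))
    cursor.execute(f"SELECT * FROM orders WHERE status IN ({placeholders})", status_list)
    return cursor.fetchall()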
Users can submit a null value even if they set the required attribute, by using their browser developer tools. If the user opens the browser's developer tools, removes required, and submits, a null value is submitted. It's very easy for users with knowledge of browser developer tools. To prevent this, you need to add validation logic on the backend side.
To resolve it, just use this:
$uploadedFileUrl = $this->cloudinary->uploadApi()->upload(
    $request->file('product_image')->getRealPath(),
    ['folder' => 'products', 'verify' => false]
);
Channel blocking in doWork: when doWork runs, it tries to send a true value to the signal channel with signal <- true. This blocks if there is no available space in the channel buffer, which causes the goroutine to block and leads to the deadlock.
Thank you for the explanation. The component comes from an external dependency, so I cannot modify the tag in the template; I have access only to the [code-length] property. How can I make sure the "aria-label" attribute is applied to each input? For example, if [code-length] = 2, aria-label should be applied to 2 input elements; if [code-length] = 4, it should be applied to 4 input elements.
Found it. To print the address, do:
print $rbp
I run my Spring Boot jar app using Termux. I installed openjdk-21 in Termux and ran it there. Since my Spring app uses a DB, I used a cloud DB for that. Works like magic.
I tried different ways and finally I found this answer:
where: {
  [Op.and]: [
    Sequelize.where(
      fn("DATE", col("createdAt")),
      Op.eq,
      fn("DATE", col("updatedAt"))
    ),
  ],
},
Using this we can find the rows where createdAt and updatedAt are the same, and the comparison considers only the date part.
When different dependencies in the project try to use different versions of the same library, Gradle resolves the conflict by selecting the highest required version by default (not the lowest) to ensure compatibility.
Open a terminal and run:
./gradlew :app:dependencies
and check for conflicts.
When we talk about an 'embedding', be it a word embedding or a paragraph embedding, it is akin to a hashmap that transforms some input into a vector of numbers that can be tuned automatically by some downstream model, e.g. a Neural Network.
In your case, if you used paragraph embeddings, your hashmap keys would be the paragraph texts themselves, and you would run into the issue that your keys are too-high-dimensional, i.e. the same paragraph of text would never appear twice, which defeats the purpose of all tuning.
I think in this case a pretrained embedding that is powerful enough to encapsulate your specific use case would probably be the best way to go, but if you really want classification at the paragraph level, perhaps you could use some pooling or aggregation function to aggregate individual word embeddings in a paragraph into a "pooled paragraph embedding"? Perhaps Bag-of-Words would help you achieve this.
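As a rough sketch of that pooling idea (assuming you already have per-word vectors from some pretrained embedding; the word_vectors lookup below is a toy stand-in):

import numpy as np

def paragraph_embedding(paragraph, word_vectors, dim=300):
    # mean-pool the vectors of every known word in the paragraph
    vectors = [word_vectors[w] for w in paragraph.lower().split() if w in word_vectors]
    if not vectors:
        return np.zeros(dim)  # fall back to zeros for fully out-of-vocabulary text
    return np.mean(vectors, axis=0)

# toy lookup just to show the shape of the idea
word_vectors = {"good": np.random.rand(300), "movie": np.random.rand(300)}
pooled = paragraph_embedding("A good movie", word_vectors)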
I am running into the same issue and installing systemfonts is not working for me (MacOS). My Rstudio is updated.
You can Refresh Content by clicking on the three dots on your project and refresh it. It is NECESSARY to close all the tabs of the project and re-open them again to work.
What is the point of the srcset attribute if mobile users end up downloading the higher resolution image along with the scaled down image anyway? Wouldn't that simply cause longer loading times, defeating the purpose performance-wise?
It seems like Visual Studio 17.11.5 (and toolset 14.41) added /arch:SSE4.2. Not very many SSE 4.2 instructions are used, on par with the old undocumented /d2archSSE42, but it's also getting documented: https://learn.microsoft.com/en-us/cpp/build/reference/arch-x64?view=msvc-170
Please find this article for the same - https://www.marcogomiero.com/workshops/introducing-kotlin-multiplatform-in-an-existing-mobile-app
Formatting and comments can be added using the openxlsx package. For example:
library(openxlsx)
library(magrittr)  # provides the %>% pipe used below

# writing the data to a new workbook
wb <- createWorkbook()
addWorksheet(wb, "Sheet1")
writeData(wb, 1, df)

# apply to all cells with NAs:
apply(which(is.na(df), arr.ind = T), MARGIN = 1, FUN = function(cell) {
  # overwrite the missing value
  writeData(wb, 1,
    sample(1:3, 1), # your interpolated value (here a random number)
    startRow = cell['row'] + 1, startCol = cell['col']
  )
  # highlight the cell
  addStyle(wb, 1,
    createStyle(fgFill = 'orange'),
    rows = cell['row'] + 1, cols = cell['col']
  )
  # add a comment to the cell
  writeComment(wb, 1, col = cell['col'], row = cell['row'] + 1,
    comment = createComment("Filled in with random number")
  )
}) %>% invisible

# view the workbook (there's also a function for saving it to disk)
openXL(wb)
This worked: conda install -c conda-forge nbformat
yeeeaah, another project without description (i.e. Pyrebase) .. I mean why? Just explain what it does, and any value it adds. SMH
I got this error message while installing the OpenCV module for Python 3.8. It keeps failing to build again and again. Please help me with this:
ERROR: Failed building wheel for opencv-python Running setup.py clean for opencv-python Failed to build opencv-python
This issue is because of a version compatibility problem between your Android Studio and the SDK versions you're using. To fix your problem you can try to:
1) Update Android Studio.
2) Check the Gradle version in gradle/wrapper/gradle-wrapper.properties.
Yes, I synchronized removeFirst but I missed synchronizing the unstartedRunnables.add call.
TL;DR: if I run removeFirst, there is a small chance that add adds a new element asynchronously, so we end up with two arrays having different sizes. That is why the ArrayIndexOutOfBoundsException is thrown. In order to solve the problem, I need to synchronize the unstartedRunnables.add call as well.
A good improvement suggestion came from @rzwitserloot, however: don't synchronize against the this-object.
If I am reading your question right, this can be done easily with Quarto's custom cross-reference syntax. Read that link for more details on this functionality. For example as a PDF:
---
format: pdf
crossref:
  custom:
    - key: hyp
      kind: float
      reference-prefix: Hypothesis
      latex-env: hypothesis # This is required if you are using PDF format
---
This is some text prior to our custom cross-reference.
::: {#hyp-test}
## My Research Question
I hypothesize that A will cause B
:::
This is some text after the cross-reference, see @hyp-test above
Produces the following document:
I got the same problem: Chrome can't read that video format. After doing some research on Google, I found several sources saying that this is a mobile format made for optimizing file sharing between devices; Chrome can read .mp4 files.
From my Google search: "3GP is a video container format designed for mobile devices. It's a simplified version of the MP4 format, with lower quality and smaller resolution. 3GP is great for sharing multimedia between devices, but it's not recommended for professional use"
If you are exporting statically, there are some limitations with app/not-found.tsx because it only works for server rendering or dynamic pages, so you need to use app/404.tsx for static export instead. This will generate 404.html after the build.
I am trying to find the same but it appears that it isn't possible. Nest doesn't officially offer a public API for their Nest Aware platform, which includes security camera footage and activity data, due to privacy and security concerns. Since Google acquired Nest and integrated it into Google’s ecosystem, they’ve significantly limited third-party access, moving most device management to the Google Home API.
But please share if you found a solution!
Pessentrau - any updates on this? I am interested in adding a BLE python client to RPi too.
I can think of two methods that may be used to meet your requirement.
1. Develop a scheduler plugin for this kind of job and inject the node affinity based on the index of the pod and node, so they match each other. For example, exec-job-0 has int(0/4) = 0, so it'll be placed on exec-node-0; exec-job-5 has int(5/4) = 1, so it'll be placed on exec-node-1 (see the small sketch of this index arithmetic after this list). If the node name is not predictable, you may have to implement the mapping inside the scheduler.
2. Use podAffinity and podAntiAffinity. For exec-job-0/4/8, let's call each of them the lead pod on its node: inject a special label into these pods and let them be scheduled to different nodes using podAntiAffinity. Then, for the following pods, use podAffinity to schedule them to the same node as their lead pod. For example, lead pod exec-job-0 on node-0 has the labels job-head-pod (for podAntiAffinity) and job-group-0 (for podAffinity); all lead pods will then be scheduled to different nodes because of the job-head-pod label, and exec-job-1 will use podAffinity on job-group-0 and be scheduled to node-0.
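A minimal Python sketch of the index arithmetic in option 1 (jobs_per_node = 4 is an assumption taken from the example numbers):

jobs_per_node = 4  # assumption from the example: exec-job-0..3 on node 0, exec-job-4..7 on node 1, ...

def node_for_job(job_index: int) -> str:
    # integer division maps each block of 4 consecutive job indices to one node
    return f"exec-node-{job_index // jobs_per_node}"

print(node_for_job(0))  # exec-node-0
print(node_for_job(5))  # exec-node-1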
The answer is that I needed to renew my GitHub token with usethis::create_github_token() and then gitcreds::gitcreds_set(). I find this really surprising, as I wasn't interfacing with GH at all. Having done this, I could build and install without issue.
Insignia doesn't exist in WiX v5. If you're still using it, you're probably corrupting your bundle. Read the documentation on how to sign bundles correctly.
Thanks to @Tsyvarev's comments I found the problem and got the solution I share here in case someone finds a similar error:
Problems:
- The Foo_LIBRARY variable was empty, so the target library was not created correctly.
- The imported target was not GLOBAL, so its scope was only the current file.
Finally, the solution that works to import a library and set its dependencies is:
FindFoo.cmake
# Look for the necessary header
find_path(Foo_INCLUDE_DIR NAMES foo.h)
mark_as_advanced(Foo_INCLUDE_DIR)

# Look for the necessary library
find_library(Foo_LIBRARY NAMES foo)
mark_as_advanced(Foo_LIBRARY)

# Extract version information from the header file
if(Foo_INCLUDE_DIR AND Foo_LIBRARY)
    set(Foo_FOUND TRUE)
    set(Foo_LIBRARIES bar baz)
endif()

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Foo
    REQUIRED_VARS Foo_INCLUDE_DIR Foo_LIBRARY Foo_LIBRARIES
)
# Create the imported target
if(Foo_FOUND)
    set(Foo_INCLUDE_DIRS ${Foo_INCLUDE_DIR})
    if(NOT TARGET Foo)
        add_library(Foo UNKNOWN IMPORTED GLOBAL)
        set_target_properties(Foo PROPERTIES
            IMPORTED_LOCATION "${Foo_LIBRARY}"
            INTERFACE_INCLUDE_DIRECTORIES "${Foo_INCLUDE_DIRS}"
            INTERFACE_LINK_LIBRARIES "${Foo_LIBRARIES}"
        )
    endif()
endif()
So with this configuration, since the target is now GLOBAL, every other CMakeLists.txt can link against the Foo library with its dependencies with just target_link_libraries(TARGET Foo), as long as find_package(Foo) has been executed somewhere.
Note: there is no need to execute find_package(Foo) before or after the final target; it will work anyway.
For the other folks that might be using dictionaries in Jinja2 templates (per an AI generator):
{% set users = [
{
'username': 'user1',
'password': 'password1',
'group': 'admin',
'ssh_key': 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArX...'
},
{
'username': 'user2',
'password': 'password2',
'group': 'developer',
'ssh_key': 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArY...'
},
{
'username': 'user3',
'password': 'password3',
'group': 'user',
'ssh_key': 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEArZ...'
}
] %}
{% for user in users %}
User: {{ user.username }}
Password: {{ user.password }}
Group: {{ user.group }}
SSH Key: {{ user.ssh_key }}
---
{% endfor %}
In your Python code, use utf8mb4 in the connection string when loading data from your database. For example, suppose you are loading data from MySQL: connection_string = f"mysql+pyodbc://@{dsn}?charset=utf8mb4"
When writing to Oracle from Python, please set the data type of objectname to NVARCHAR.
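As a rough sketch of the Oracle side with pandas and SQLAlchemy (the engine URLs, table names, and the objectname column are placeholders; the specific drivers are an assumption, not something from the original answer):

import pandas as pd
from sqlalchemy import create_engine
from sqlalchemy.types import NVARCHAR

# placeholder connection strings; adjust drivers and credentials to your setup
mysql_engine = create_engine("mysql+pymysql://user:pass@host/db?charset=utf8mb4")
oracle_engine = create_engine("oracle+oracledb://user:pass@host/?service_name=svc")

df = pd.read_sql("SELECT objectname FROM source_table", mysql_engine)

# force the unicode column to NVARCHAR on the Oracle side
df.to_sql("target_table", oracle_engine, if_exists="append", index=False,
          dtype={"objectname": NVARCHAR(255)})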
You can separate your StickyHeader and LazyRow into two distinct parts within a Column:
Column(modifier = modifier) {
    StickyHeader()
    LazyRow() {
        // yourItems()
    }
}
You cannot remove an event listener whose callback is an inline arrow function, because you have no reference to pass to removeEventListener. You would need to assign the callback to a variable (or use a named function) and pass that same reference to both addEventListener and removeEventListener.
This error usually happens if you're not in the same directory as the file. Here are solutions for different scenarios:
Terminal: navigate to the directory where filename.java is located before running it:
cd /path/to/your/file
javac filename.java
IDE (e.g., VS Code): clear the cache and reload the IDE. This should recognize the file even if the terminal isn't in the exact directory.
Hope this helps! 😊
I can't comment on the above-accepted answer; hence, I am providing another answer.
Please note a few changes for the sample code to work in 2024.
Do not include https:// in the endpoint, as it is appended by the library.
Do not add a trailing / at the end of the communication server endpoint; it leads to the wrong auth header and results in a 401.
Example:
payload := emails.Payload{
    Headers: emails.Headers{
        ClientCorrelationID:    "some-correlation-id",
        ClientCustomHeaderName: "some-custom-header",
    },
    SenderAddress: "<[email protected]>",
    Content: emails.Content{
        Subject:   "Fabric tenant abcde wants to upgrade to paid offer",
        PlainText: "This is being generated from dev-server.",
        HTML:      "<p>This is a <strong>test</strong> email.</p>",
    },
    Recipients: emails.Recipients{
        To: []emails.ReplyTo{
            {
                Address:     "[email protected]",
                DisplayName: "Garima Bathla",
            },
        },
    },
    Attachments:                    []emails.Attachment{},
    UserEngagementTrackingDisabled: false,
}
I'm afraid you can't. See this answer on tauri's discussion forum - apparently webviews don't route API calls from web workers to the tauri core. Perhaps someday though.
Similar error for my application as well. It turned out to be a URL rewrite rule at the machine level in IIS, not at the site level (which we could have noticed); someone had mistakenly set up an incorrect rule at the machine level.
You may be able to use:
rmdir "\\?\C:\inetpub\wwwroot\cd.."
As found in this Microsoft fileio page:
the "\\?\" prefix also allows the use of ".." and "." in the path names, which can be useful if you are attempting to perform operations on a file with these otherwise reserved relative path specifiers as part of the fully qualified path.
What I did was click on the Review Issue, and then there was a box to start using Insight on flows (I don't remember the exact phrase, but it was about Flow Insight) and I confirmed it. That was it.
I didn't take a screenshot because I wasn't expecting it to be the issue, but that worked.
Have you tried converting the dates to UTC before comparing? At least this would remove the timezone problem
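For example, in Python (a minimal sketch using only the standard library; the zones and times are made up):

from datetime import datetime, timezone
from zoneinfo import ZoneInfo

a = datetime(2024, 6, 1, 9, 30, tzinfo=ZoneInfo("America/New_York"))
b = datetime(2024, 6, 1, 14, 30, tzinfo=ZoneInfo("Europe/London"))

# normalize both values to UTC so the comparison is unambiguous
print(a.astimezone(timezone.utc) == b.astimezone(timezone.utc))  # True: same instant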
It looks like adding the following to your Cloudflare Pages build configuration, found in Settings > Build > Build Configuration, fixes the deployment issue and any subsequent 404 error you may get once the page deploys.
Build command:
npx ng build --verbose --output-path dist/cloudflare
Build output:
dist/cloudflare/browser
Optionally you can change your package.json build command to be:
npx ng build --verbose --output-path dist/cloudflare
and leave the Cloudflare build command at the default of ng build.
If you are building successfully but getting a 404, change the build output to dist/cloudflare/browser
procedure MoveBack(Owner, Dest : TControl; zOrder : integer);
var
  a : integer;
begin
  a := Owner.Controls.IndexOf(TControl(Dest));
  if a <> -1 then
  begin
    Owner.Controls.Delete(a);
    Owner.Controls.Insert(zOrder, TControl(Dest));
  end;
end;
Not absolutely sure, but maybe you can configure it in the Customize Layout settings.
How can there be no possible moves? With 3 colors and 4 non-empty stacks, there are always 2 stacks with the same color on top, so we can move all nuts of this color from the top of one stack to another. This either decreases the number of pairs of adjacent nuts with different colors, or adds an empty stack which we can use to do the same thing.
I use: file_exists(html_entity_decode(utf8_decode($filename)))
This works for me.
I've got mine working. Go to the line indicated in the log and find the function that has a parameter inside curly braces. Change them to square brackets. That worked for me. I had to do this a few times; blank screens keep coming as I do more, but that seems to be the only problem.
Dear sirs, I am, in person, the man who has been robbed multiple times through codes. Ask Google and Microsoft about my current Gmail; it was set up long ago and then disabled, I don't know why or for what reason, and the same applies to quite a few other situations.
It was solved by just changing All to Any! Thank you for the help!
var genreBooks = allBooks.Where(x => x.Genres.All(y => y == "Horror")).ToList();
Became
var genreBooks = allBooks.Where(x => x.Genres.Any(y => y == "Horror")).ToList();
const mysql = require('mysql');
const db = mysql.createConnection({ host: 'localhost', user: 'root', password: '', database: 'my_database' });
module.exports = db;
I believe I identified the problem. The application I'm developing uses a clean architecture with DDD to enrich the models. For the User entity, which contains several properties, I defined all of them as ObjectValues. For example, for the Id, I created a class called UserId; for the name property, I created a class called UserName, and so on. I applied the same approach to the Role entity, creating a RoleId class for the Id and UserRole for the role name.
// Object Values / User
public class UserId
{
    public Guid Value { get; }
    private UserId(Guid value) => Value = value;

    public static UserId Of(Guid value)
    {
        ArgumentNullException.ThrowIfNull(value);
        if (value == Guid.Empty)
        {
            throw new Exception("UserId cannot be empty");
        }
        return new UserId(value);
    }
}

public class UserName
{
    public string Value { get; }
    private UserName(string value) => Value = value;

    public static UserName Of(string value)
    {
        ArgumentNullException.ThrowIfNull(value);
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new Exception("UserName cannot be empty");
        }
        return new UserName(value);
    }
}

// Object Values / Role
public class RoleId
{
    public Guid Value { get; }
    private RoleId(Guid value) => Value = value;

    public static RoleId Of(Guid value)
    {
        ArgumentNullException.ThrowIfNull(value);
        return new RoleId(value);
    }
}

public class RoleName
{
    public string Value { get; }
    private RoleName(string value) => Value = value;

    public static RoleName Of(string value)
    {
        ArgumentNullException.ThrowIfNull(value);
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new Exception("RoleName cannot be empty");
        }
        return new RoleName(value);
    }
}
Next, within each of the User and Role models, I created the respective navigation properties. In the User entity, I created the Roles property like this:
public List<Role> Roles { get; set; }
And in the Role entity, I created the Users navigation property:
public List<User> Users { get; set; }
public class User : Entity<UserId>
{
    public UserName? UserName { get; set; }
    public List<Role> Roles { get; set; } = [];

    public User() { }

    public User(Guid id, string userName)
    {
        Id = UserId.Of(id);
        UserName = UserName.Of(userName);
    }

    public static User Create(Guid id, string userName)
    {
        return new User(id, userName);
    }
}

public class Role : Entity<RoleId>
{
    public RoleName RoleName { get; set; } = default!;
    public List<User> Users { get; } = [];

    public Role() { }

    public Role(Guid id, string roleName)
    {
        Id = RoleId.Of(id);
        RoleName = RoleName.Of(roleName);
    }

    public static Role Create(Guid id, string roleName)
    {
        return new Role(id, roleName);
    }
}
In the database context (SQL Server), within the OnModelCreating method, I defined table names and primary keys and converted the value object properties to primitive values. For example, for the User entity Id I did this: builder.Property(u => u.Id).HasConversion(id => id.Value, value => UserId.Of(value));
I applied the same logic to the rest of the User properties as well as to the Role entity. Finally, I defined the relationship between User and Role as many-to-many, as shown below:
// Users
builder.ToTable("Users");
builder.HasKey(u => u.Id);
builder.Property(u => u.Id).HasConversion(id => id.Value, value => UserId.Of(value));
builder.Property(u => u.UserName).HasConversion(prop => prop!.Value, value => UserName.Of(value));

// Roles
builder.ToTable("Roles");
builder.HasKey(r => r.Id);
builder.Property(r => r.Id).HasConversion(id => id.Value, value => RoleId.Of(value));
builder.Property(r => r.RoleName).HasConversion(prop => prop!.Value, value => RoleName.Of(value));

// Many-to-Many Relationship
modelBuilder.Entity<User>()
    .HasMany(u => u.Roles)
    .WithMany(ur => ur.Users);
Then, through the context injected in a controller method, I attempted to perform a simple query to retrieve the user identified by Id = "58c49479-ec65-4de2-86e7-033c546291aa" along with their assigned roles as follows:
var user = await _context.Users
    .Include(user => user.Roles)
    .Where(user => user.Id == UserId.Of(Guid.Parse("58c49479-ec65-4de2-86e7-033c546291aa")))
    .SingleOrDefaultAsync();
When executing this query, it didn’t return the user's associated roles (which do exist), and it generated an exception indicating that the query returned more than one record that matches the filter, which isn't true since there's only one user with a single role in the database. After much trial and error, I decided to replace the ObjectValues used as identifiers for the User and Role entities with primitive values, in this case Guid. I removed the ObjectValue-to-Primitive transformation line in OnModelCreate for both user and role, resulting in the following setup:
// Users
builder.ToTable("Users");
builder.HasKey(u => u.Id);
builder.Property(u => u.UserName).HasConversion(prop => prop!.Value, value => UserName.Of(value));

// Roles
builder.ToTable("Roles");
builder.HasKey(r => r.Id);
builder.Property(r => r.RoleName).HasConversion(prop => prop!.Value, value => RoleName.Of(value));

// Many-to-Many Relationship
modelBuilder.Entity<User>()
    .HasMany(u => u.Roles)
    .WithMany(ur => ur.Users);
I also modified the User and Role entities:
public class User : Entity<Guid>
{
    public UserName? UserName { get; set; }
    public List<Role> Roles { get; set; } = [];

    public User() { }

    public User(Guid id, string userName)
    {
        Id = id;
        UserName = UserName.Of(userName);
    }

    public static User Create(Guid id, string userName)
    {
        return new User(id, userName);
    }
}

public class Role : Entity<Guid>
{
    public RoleName RoleName { get; set; } = default!;
    public List<User> Users { get; } = [];

    public Role() { }

    public Role(Guid id, string roleName)
    {
        Id = id;
        RoleName = RoleName.Of(roleName);
    }

    public static Role Create(Guid id, string roleName)
    {
        return new Role(id, roleName);
    }
}
After re-configuring the entities, I ran the query again, and voila! The user now returns the associated roles as expected. I'm not sure what happens with EFC 8 regarding entity identifiers of type ObjectValue, but it doesn't seem to handle them well. For now, I prefer to work with primitive data types for identifiers to avoid these issues. If this can help someone, great. Or, if anyone knows how to solve or address this, I'd love to hear about it. Cheers!
I have a similar filter that appears to work -- there's a pretty frustrating hiccup, though, as I'll try to explain below.
My method was to supply names of the classes that I want to keep, as a comma separated list, in a metadata argument to the pandoc cli. Inside the filter I parsed that list into a table using LPeg; I then applied a filter each to Blocks and Inlines elements, using that table to test inclusion.
Given this filter (classfilter.lua on the LUA_PATH):
-- split arglist by lpeg, function as it appears on lpeg documentation:
-- https://www.inf.puc-rio.br/~roberto/lpeg/
local function split(s, sep)
    sep = lpeg.P(sep)
    local elem = lpeg.C((1 - sep) ^ 0)
    local p = lpeg.Ct(elem * (sep * elem) ^ 0) -- make a table capture
    return lpeg.match(p, s)
end

local keeplist = {}

-- This function will go inside the Meta filter
-- the keeplist table will be available to the rest
-- of the filters to consult
local function collect_vars(m)
    for _, classname in pairs(split(m.keeplist, ",")) do
        keeplist[classname] = true
    end
end

local function keep_elem(_elem)
    -- keep if no class designation
    if not _elem.classes or #_elem.classes == 0 then
        return true
    end
    -- keep if class name in keeplist
    for _, classname in ipairs(_elem.classes) do
        if keeplist[classname] ~= nil then
            return true
        end
    end
    -- don't keep otherwise
    return false
end

local function filter_list_by_classname(_elems)
    for _elemidx, _elem in ipairs(_elems) do
        if not keep_elem(_elem) then
            _elems:remove(_elemidx)
        end
    end
    return _elems
end

-- forcing the meta filter to run first
return { { Meta = collect_vars }, { Inlines = filter_list_by_classname, Blocks = filter_list_by_classname } }
Running this pandoc CLI command:
pandoc -f markdown -t markdown --lua-filter=classfilter.lua <YOUR EXAMPLE INPUT> -M 'keeplist=other,bw-only'
I get the following output:
## Images

{width="30px"}
<figure>
<figcaption>Color only image</figcaption>
</figure>
<figure>
<figcaption>Color only image</figcaption>
</figure>
<figure>
<figcaption>Color only image</figcaption>
</figure>
{.bw-only}
{.bw-only width="30px"}
{.bw-only width="30px"}
## Blocks
::: other
Block that shouldn't be filtered.
:::
::: Block that shouldn't be filtered. :::
::: bw-only
BW only block.
:::
## Spans
[Span that shouldn't be filtered]{.other}
[BW only span]{.bw-only}
## Links
[Link that shouldn't be filtered](link.html)
[Link that shouldn't be filtered](link.html){.other}
[BW only link](link.html){.bw-only}
... which appears to be the desired output, except for the <figure> tags, which I haven't been able to remove. Assuming that my Inlines filter removes only the src element from a Figure and keeps the caption intact, I tried iterating over Figure and Image elements in a separate filter to locate Figure blocks with empty contents fields and replace them with an empty table, but that didn't alter the result at all. I mean, adding the following to the filter_list_by_classname function before the ipairs loop:
_elems:walk({
    Figure = function(a_figure)
        if not a_figure.content[1].content[1] then
            return {}
        else
            return a_figure
        end
    end
})
did nothing.
So maybe this could be the start of a solution.
Made a small project; I will probably change it up a little bit, but at its core it has JS syntax highlighting inside CDATA: https://github.com/Hrachkata/RCM-highlighter
Not sure if you are still looking for an answer, but the correct way to use this is {{#is_match}}; {{^is_match}} is the negative case.
Please note that you can run only one VACUUM command on a cluster at any given time. If you attempt to run multiple vacuum operations concurrently, Amazon Redshift returns an error.
Usage notes.
This is just an additional option for people in the future who run into the same problem as me. If you are new and too lazy to write boring commands or lines of code, you can check out the steps below 🤓
I found an answer that is embedded below (I hope). It is not the most pleasing solution but it works. Work directly with screen coordinates (event.x, NOT event.xdata); the magic for mpl_connect with twinx is:
inv = ax1.transData.inverted()
x, y = inv.transform([(event.x, event.y)]).ravel()
import matplotlib.pyplot as plt
from matplotlib.widgets import Cursor
import numpy as np
import sys

coord = []
fig, ax1 = plt.subplots()
ax2 = ax1.twinx()
ax1.plot([0, 1, 2, 3], [1, 2, 3, 4], 'b-')
ax2.plot([0, 1, 2, 3], [40, 30, 20, 10], 'r-')

def ontwinxclick(event):
    global ix
    ix = event.xdata
    global coord
    annot = ax1.annotate("", xy=(0,0), xytext=(-40,-40), textcoords="offset points",
                         bbox=dict(boxstyle='round4', fc='linen', ec='k', lw=1, alpha=.936, zorder=np.inf),
                         arrowprops=dict(arrowstyle='-|>', zorder=np.inf), zorder=np.inf,
                         size=12, color='orangered', style='italic', weight='bold')
    annot2 = ax2.annotate("", xy=(0,0), xytext=(-40,-40), textcoords="offset points",
                          bbox=dict(boxstyle='round4', fc='linen', ec='k', lw=1, alpha=.936, zorder=np.inf),
                          arrowprops=dict(arrowstyle='-|>', zorder=np.inf), zorder=np.inf,
                          size=12, color='navy', style='italic', weight='bold')
    annot.set_visible(False)
    annot2.set_visible(False)
    import math
    for i, ax in enumerate([ax1, ax2]):
        if ax == event.inaxes:
            print("Click is in axes: ax{}".format(i+1))
    if event.button == 3:
        if ax2 == event.inaxes:
            print(f'Button: {event.button}, xdata: {event.xdata}, ydata: {event.ydata}')
            x = event.xdata; y = event.ydata
            annot2.xy = (x, y)
            text = "x2={:,.2f}".format(x)
            text = text + "\ny2={:,.2f}".format(y)
            annot2.set_text(text)
            annot2.set_visible(True)
            fig.canvas.draw()
        else:
            print(f'Button: {event.button}, xdata: {event.xdata}, ydata: {event.ydata}')
            inv = ax1.transData.inverted()
            x, y = inv.transform([(event.x, event.y)]).ravel()
            print(f'Screen: {event.button}, x: {event.x}, y: {event.y}')
            print(f'Data: {event.button}, x: {x}, y: {y}')
            annot.xy = (x, y)
            text = "x={:,.2f}".format(x)
            text = text + "\ny={:,.2f}".format(y)
            annot.set_text(text)
            annot.set_visible(True)
            fig.canvas.draw()
            coord.append((x, y))
    #pdir(event.inaxes)

#ax1.set_zorder(0.1)
cursor = Cursor(ax1, color='darkviolet', linewidth=1.2, horizOn=True, \
                vertOn=True, useblit=True)
fig.canvas.mpl_connect('button_press_event', ontwinxclick)
plt.show()
The result is shown in the attached screenshots.
Ok, I don't know whether this is the smoothest solution, but at least I am now able to execute a headless Azure Pipeline Deployment to my target server.
Before I call the docker compose command, I execute this:
gpg --pinentry-mode loopback --passphrase "$(GPG_KEY_PASSPHRASE)" --decrypt $(GPG_PASSWORD_STORE_PATH)
where $(GPG_PASSWORD_STORE_PATH) represents the path to the .gpg file.
Then my docker compose command works.
Please find the link to the video for two sum: https://youtu.be/6PcYE1TQ54E
Thank you very much, ptc-epassaro. As you mentioned, Vuforia does not work in a Holographic Remoting deployment. I deployed the app on the HoloLens and it is working now.
This is basically a dupe of asp.net core web API file upload and "form-data" multiple parameter passing to method.
You cannot use multiple form parameters in the controller action (e.g. form and body params). You need to encapsulate your parameters into one param object, as described in the linked post.
I have an older Intel chip and after downloading the new Sequoia update to the MacOS, my PgAdmin stopped working and just throws this error:
_LSOpenURLsWithCompletionHandler() failed with error -54.
Tried the above code from the command line: brew install --cask pgadmin4 --verbose
Still get the error. My MacBook does not need Rosetta since it's the older one. Anyone have any idea how to get the application working again?
I'm struggling with this error too. I'm encountering the exact same "Validate signature error" when attempting to broadcast TRON transactions in my TypeScript application. I've ensured that my owner address matches the private key and have carefully set all necessary transaction parameters. Have you found a solution to this permission-related signature issue? Any guidance or insights would be greatly appreciated!
br {
    display: block;
    content: "";
    margin-top: 20px;
    margin-bottom: 20px;
}
Well, you could look at it this way. Using fragments with multiple activities was a real pain. Think Fragment transactions - much pain. Therefore, the NavigationComponent was developed - no more pain. I hate even thinking back to those days. One other thing you've probably never used is a ConstraintLayout. It replaces the need for complex LinearLayouts. The Xamarin.Android Designer has not been updated in the last 4 or 5 years and will soon be removed from VS 2022. Therefore, it is probably time for you to install Android Studio, because AS makes it very easy to set up a complex layout using a ConstraintLayout. Once done, you can copy/paste your new layout into a new empty Xamarin.Android XML file and build it. There are more sophisticated/efficient ways of getting it into VS, which I can expand on.
It might be an idea to take this stuff to email because I doubt that SO appreciates these types of answers.
I massively improved the code from @Arch. It uses the Chrome debugger and requires an extension.
class HiddenClass {
    hiddenProperty;
    constructor(value) {
        this.hiddenProperty = value;
    }
}

var Secret = (() => {
    let _secret;
    return function Secret(secret) {
        _secret = secret;
    }
})();

let obj = new Secret(new HiddenClass(1337));
console.log(await scopeInspect(obj, ["hiddenProperty"]));
// HiddenClass {hiddenProperty: 1337}
In JavaScript, a simple for loop (like the one in your function) is synchronous. This means each iteration of the loop runs to completion before moving on to the next one, and the function doesn't reach the return statement until the loop has finished all its iterations. Why do you think it is returning before ending the for loop?
If true randomness isn't necessary, use first_value() for simplicity, which can retrieve the first value in each group:
SELECT a, first_value(b) OVER (PARTITION BY a ORDER BY b) AS arbitrary_b
FROM foo
GROUP BY a;
If maintainability and clarity are priorities, Option 2 (two .py files with a shared module) is generally the best practice, as it keeps each DAG isolated while avoiding code duplication. This method also allows you to schedule backfilling and ongoing runs independently and view them clearly in the Airflow UI.
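A minimal sketch of that layout (assuming Airflow 2.x; the DAG ids, dates, and the common.etl module are made-up names, with the shared module living next to the DAG files inside the dags folder):

# dags/common/etl.py - shared task logic imported by both DAG files
def run_etl(ds, **kwargs):
    print(f"processing partition {ds}")

# dags/my_pipeline_backfill.py
from datetime import datetime
from airflow import DAG
from airflow.operators.python import PythonOperator
from common.etl import run_etl

with DAG("my_pipeline_backfill", start_date=datetime(2023, 1, 1),
         end_date=datetime(2023, 12, 31), schedule="@daily", catchup=True) as dag:
    PythonOperator(task_id="run_etl", python_callable=run_etl)

# dags/my_pipeline_ongoing.py would be identical apart from the dag_id,
# a later start_date, no end_date, and catchup=False.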
Just call exitApp() inside a screen before the return, or run it from onPress={() => exitApp()}:
import { BackHandler } from "react-native";

const exitApp = () => {
    BackHandler.exitApp();
};
Thank you for sharing the Trunk.toml example.
I have tried it and the build process was going into an infinite loop. With a slight change, I made it rebuild only once, after detecting a change in any file located in the ./src folder.
[watch]
watch = ["./src"]
Regards,
Ok, I see SB-Prolog had unnumbervars/3:
https://www3.cs.stonybrook.edu/~sbprolog/manual2/node6.html
Only it has a different signature. But it is a start!
Discord has not yet implemented any Markdown formatting about tables, and there is no current way to print a table on Discord without client modifications, or sending an image file. It has been long demanded though.
Read about binary files on the Wikipedia page: https://en.wikipedia.org/wiki/Binary_file
When the subquery is no longer self-contained, you need a connection between the inner query and the outer query. That is why we use correlated queries, which means the subquery is calculated for each row based on the data it receives from the outer query. For example, use the pubs data set and try this:
select title, [type], price,
    (select avg(price) from dbo.titles as InnerQuery where InnerQuery.[type] = OuterQuery.[type]) as AVGPrice
from dbo.titles as OuterQuery
As a result you will have the average price of each book type.
Much easier: just type it in HTML, that's all...
The file is moved to the path specified in the second parameter. However, if the drive (e.g., C:/) is not explicitly declared, the path is treated as relative. So, the file is moved to a location relative to where the executable is running, typically within the directory of the .sln file at /bin/Debug/net8.0/.
Already an old thread, but I have a little problem. Somehow I have created a folder named cd.. and I can't find any solution to delete this folder. It's on a Windows Server 2016 machine. Normal deleting gives an error that it can no longer be located. I also tried (with admin rights) cmd and rmdir cd.. or rmdir "cd.." but also no luck.
Thanks everyone for the help. I used the feedback to now use a substitution in the string variable that may contain one or more IP addresses:
use Regexp::Common qw(net);  # provides $RE{net}{IPv4}

my $Event_text = "This is a test string with a possible IP address: 10.10.10.100 but there is also 20.20.20.256";
my $New_text = $Event_text;

if ( $New_text =~ /\b$RE{net}{IPv4}\b/ )
{
    print "IP FOUND\n";
    $New_text =~ s/$RE{net}{IPv4}/ "X.X.X.X" /eg;
    $Event_text = $New_text;
}
else
{
    print "No IP\n";
}
print "Event_text: $Event_text\n";
This mostly works. But with this code, when one of the IPs in the string is invalid, it returns this output:
Event_text: This is a test string with a possible IP address: X.X.X.X but there is also X.X.X.X6
So you can see that it's trying to substitute the invalid octet "256" but it does so by leaving the last digit (6) for some reason.
I think the substitution requires a tweak around the $RE{net}{ipv4}. The description of the Regexp::Common does say "To prevent the unwanted matching, one needs to anchor the regexp: /^$RE{net}{IPv4}$/". But it's not clear how to implement that.
There are sample font resolvers published in the docs: https://docs.pdfsharp.net/PDFsharp/Topics/Fonts/Sample-Font-Resolvers.html
There is no answer to this question using Jackson, so in the end, in order to keep 3 < 7 as it is, I needed to modify the client to post the XML wrapped in <![CDATA[3 < 7]]>; then Jackson will not convert that to 3 &lt; 7 (which is not the wanted behavior).
As of 2023, keras 3.0.0 supports other types of backends, specifically torch and jax. It most likely would be a good idea to write tensorflow "agnostic" keras code in the future, since in a real world scenario there is some boilerplate data handling usually mixed with model creation and decoupling from tf might be useful.
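For example, a minimal backend-agnostic sketch (the backend must be chosen via KERAS_BACKEND before keras is imported; the tiny model here is just an illustration):

import os
os.environ["KERAS_BACKEND"] = "torch"  # or "jax" / "tensorflow"

import keras
from keras import layers, ops

model = keras.Sequential([
    keras.Input(shape=(784,)),
    layers.Dense(64, activation="relu"),
    layers.Dense(10, activation="softmax"),
])

# keras.ops works on tensors from whichever backend is active
x = ops.ones((1, 784))
print(model(x).shape)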
If it is an Expo app, use the code below. First install the dependency and import it:
expo install expo-updates
import * as Update from 'expo-updates';

const onPress = async () => {
    await Update.reloadAsync();
};
To fix this, we had to call this method when starting up the app in OnStart() in App.xaml.cs, and also call it if the user manually changes the theme in the in-app settings:
public static void SetActivityColor()
{
#if ANDROID
    var activity = Platform.CurrentActivity;
    activity?.Window?.DecorView?.SetBackgroundColor(
        App.CurrentTheme == AppTheme.Dark ?
            Android.Graphics.Color.Black :
            Android.Graphics.Color.White);
#endif
}
Unsure if this will be needed for iOS.
Is anyone familiar with a solution similar to the one demonstrated in this video?
The post by @Thracian made me investigate their code, and then combine it with the code for CutCornerShape and RoundedCornerShape.
Here's the SemiRoundCutCornerShape
fun SemiRoundCutCornerShape(size: Dp, roundedLeft: Boolean = true) = SemiRoundCutCornerShape(size, size, roundedLeft)

fun SemiRoundCutCornerShape(cutSize: Dp, roundSize: Dp, roundedLeft: Boolean = true) = SemiRoundCutCornerShape(
    topStart = CornerSize(roundSize),
    topEnd = CornerSize(cutSize),
    bottomEnd = CornerSize(roundSize),
    bottomStart = CornerSize(cutSize),
    roundedLeft = roundedLeft
)

class SemiRoundCutCornerShape(
    topStart: CornerSize,
    topEnd: CornerSize,
    bottomEnd: CornerSize,
    bottomStart: CornerSize,
    private val roundedLeft: Boolean = true
) : CornerBasedShape(
    topStart = topStart,
    topEnd = topEnd,
    bottomEnd = bottomEnd,
    bottomStart = bottomStart,
) {
    override fun createOutline(
        size: Size,
        topStart: Float,
        topEnd: Float,
        bottomEnd: Float,
        bottomStart: Float,
        layoutDirection: LayoutDirection
    ): Outline {
        val roundOutline: Outline = Outline.Rounded(
            when (layoutDirection == LayoutDirection.Ltr && roundedLeft) {
                true -> RoundRect(
                    rect = size.toRect(),
                    topLeft = CornerRadius(if (layoutDirection == LayoutDirection.Ltr) topStart else topEnd),
                    bottomRight = CornerRadius(if (layoutDirection == LayoutDirection.Ltr) bottomEnd else bottomStart),
                )
                false -> RoundRect(
                    rect = size.toRect(),
                    topRight = CornerRadius(if (layoutDirection == LayoutDirection.Ltr) topEnd else topStart),
                    bottomLeft = CornerRadius(if (layoutDirection == LayoutDirection.Ltr) bottomStart else bottomEnd)
                )
            }
        )
        val cutOutline: Outline = Outline.Generic(
            when (layoutDirection == LayoutDirection.Ltr && roundedLeft) {
                true -> Path().apply {
                    var cornerSize = 0F
                    moveTo(0f, cornerSize)
                    lineTo(cornerSize, 0f)
                    cornerSize = topEnd
                    lineTo(size.width - cornerSize, 0f)
                    lineTo(size.width, cornerSize)
                    cornerSize = 0F
                    lineTo(size.width, size.height - cornerSize)
                    lineTo(size.width - cornerSize, size.height)
                    cornerSize = bottomStart
                    lineTo(cornerSize, size.height)
                    lineTo(0f, size.height - cornerSize)
                    close()
                }
                false -> Path().apply {
                    var cornerSize = topEnd
                    moveTo(0f, cornerSize)
                    lineTo(cornerSize, 0f)
                    cornerSize = 0F
                    lineTo(size.width - cornerSize, 0f)
                    lineTo(size.width, cornerSize)
                    cornerSize = bottomStart
                    lineTo(size.width, size.height - cornerSize)
                    lineTo(size.width - cornerSize, size.height)
                    cornerSize = 0F
                    lineTo(cornerSize, size.height)
                    lineTo(0f, size.height - cornerSize)
                    close()
                }
            }
        )
        return Outline.Generic(Path.combine(
            operation = PathOperation.Intersect,
            path1 = Path().apply { addOutline(cutOutline) },
            path2 = Path().apply { addOutline(roundOutline) }
        ))
    }

    override fun copy(
        topStart: CornerSize,
        topEnd: CornerSize,
        bottomEnd: CornerSize,
        bottomStart: CornerSize
    ): CornerBasedShape = SemiRoundCutCornerShape(
        topStart = topStart,
        topEnd = topEnd,
        bottomEnd = bottomEnd,
        bottomStart = bottomStart
    )

    override fun toString(): String {
        return "SemiRoundCutShape(topStart = $topStart, topEnd = $topEnd, bottomEnd = " +
                "$bottomEnd, bottomStart = $bottomStart)"
    }
}
Here it is used to create a few variants:
SemiRoundCutCornerShape(8.dp)
SemiRoundCutCornerShape(24.dp, roundedLeft = false)
SemiRoundCutCornerShape(16.dp)
SemiRoundCutCornerShape(
    topStart = CornerSize(60.dp),
    topEnd = CornerSize(8.dp),
    bottomEnd = CornerSize(16.dp),
    bottomStart = CornerSize(20.dp)
)
I get the same error when trying to drag and drop a file into Visual Studio (this behaviour started when I upgraded to version 17.11.3).
What's weird: if I open the target folder using Windows Explorer and drag and drop directly there, that works. Once I have done that, I can thereafter successfully drag and drop files directly into Visual Studio 🤷♀️
Just for completeness, very simple with WinGet on windows:
winget search julia
or just
winget install Julialang.Julia
then anytime
winget upgrade
Well, I use Ubuntu 24.10 with an important program called GNOME Tweaks, and in there you can map the Caps Lock key to the Control key. In Emacs alone you can't do this. I hope this helps others.
I was looking through the Pi Pico W pinout diagram again and realized that pins 23, 24, and 25 are not actual GPIO pins. Maybe that is why the code did not work. I'm not sure, but I will stick with this explanation for myself.
You can see the functions of the three pins in the table below the diagram: