Now it's possible to compile C++ code into WASM and then use it in Go with the help of the wazero runtime. No CGO is needed. Consider go-sqlite3 as an example.
Here's my fixed-up version of it: main.js
async function getLatestRelease(repo) {
const response = await fetch(`https://api.github.com/repos/${repo}/releases/latest`);
const data = await response.json();
return data.tag_name;
}
function downloadFile(url, filename) {
const link = document.createElement('a');
link.href = url;
link.download = filename;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
}
async function showPopup(type) {
let popupText = '';
let downloadLink = '';
if (type === 'dev') {
popupText = 'Thank you for downloading Comp_V3 *dev*! This download link goes to the Github API. This is the source code. <br> <br> You will need my Custom SDK to use this. Check out my other download in the navbar.';
downloadLink = 'https://github.com/RanchoDVT/Comp-V5/archive/refs/heads/dev.zip';
document.getElementById('popup-title').innerText = 'Download Comp-V3 ' + type;
} else if (type === 'stable') {
const latestTag = await getLatestRelease('RanchoDVT/Comp-V5');
popupText = 'Thank you for downloading Comp_V3 stable! This download link goes to the Github API. This is the source code. <br> <br> You will need my Custom SDK to use this. Check out my other download in the navbar.';
downloadLink = `https://github.com/RanchoDVT/Comp-V5/archive/refs/tags/${latestTag}.zip`;
document.getElementById('popup-title').innerText = 'Download Comp-V3 ' + type + ' ' + latestTag;
} else if (type === 'sdk') {
const latestTag = await getLatestRelease('RanchoDVT/Vex-SDK');
popupText = 'Thank you for downloading my custom SDK. This is unofficial and in no way affiliated, endorsed, supported, or created by VEX Robotics. <br> <br> You will need this to install my Custom SDK (This) to use my Comp_V3 Program. This modifies Vex\'s robotics extension, so PLEASE don\'t go to them if you have problems with this. Please contact me. <br> <br>There is a PowerShell script for this to make it easier: ';
popupText += '<a href="https://minhaskamal.github.io/DownGit/#/home?url=https://github.com/RanchoDVT/Vex-SDK/blob/dev/Vex-SDK.updater.ps1">Powershell download</a>';
document.getElementById('popup-title').innerText = 'Download Custom ' + type + ' ' + latestTag;
downloadLink = `https://github.com/RanchoDVT/Vex-SDK/archive/refs/tags/${latestTag}.zip`;
}
document.getElementById('popup-text').innerHTML = popupText; // Use innerHTML to render HTML content
document.getElementById('download-link').href = downloadLink;
document.getElementById('popup').classList.add('active');
document.getElementById('overlay').classList.add('active');
}
function hidePopup() {
document.getElementById('popup').classList.remove('active');
document.getElementById('overlay').classList.remove('active');
}
The navbar:
<nav>
<li><a class="nav-link" data-page="index.html" href="index.html">Home</a></li>
<li class="dropdown">
<a class="nav-link" data-page="projects.html" href="projects.html">Projects</a>
<div class="dropdown-content">
<a target="_blank" href="https://github.com/Voidless7125/Comp-V5">Comp V3</a>
<a target="_blank" href="https://github.com/RanchoDVT/Vex-SDK">Custom SDK</a>
<a target="_blank" href="https://ranchodvt.github.io/Comp-V5/">This website!</a>
</div>
</li>
<li class="dropdown">
<a class="nav-link">Downloads</a>
<div class="dropdown-content">
<a onclick="showPopup('stable')">Comp_V3 Stable</a>
<a onclick="showPopup('dev')">Comp_V3 Dev</a>
<a onclick="showPopup('sdk')">Custom SDK Stable</a>
</div>
</li>
<li><a class="nav-link" data-page="features.html" href="features.html">Features</a></li>
<li><a class="nav-link" data-page="contact.html" href="contact.html">Contact</a></li>
<li style="float: right;"><a class="nav-link" data-page="about.html" href="about.html">About</a></li>
</nav>
<!-- Pop-Up Structure -->
<div id="popup" class="popup">
<div class="popup-header">
<h2 id="popup-title">Download</h2>
</div>
<p id="popup-text"></p>
<button class="cancel-btn" onclick="hidePopup()">Cancel</button>
<a id="download-link" class="download-btn" href="#" download>Download</a>
</div>
<div id="overlay" class="overlay" onclick="hidePopup()"></div>
And the CSS:
.popup {
display: none;
position: fixed;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
width: 400px;
border: 1px solid #ccc;
padding: 20px;
background-color: #fff;
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
z-index: 1000;
border-radius: 8px;
}
.popup.active {
display: block;
background-color: black;
}
.popup-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 10px;
background-color: black;
}
.popup-header h2 {
margin: 0;
font-size: 18px;
background-color: black;
}
.download-btn, .cancel-btn {
display: inline-block;
margin-top: 10px;
padding: 10px 20px;
border: none;
border-radius: 4px;
cursor: pointer;
}
.download-btn {
background-color: #4CAF50;
color: white;
text-decoration: none;
text-align: center;
}
.cancel-btn {
background-color: #f44336;
color: white;
}
.overlay {
display: none;
position: fixed;
left: 0;
top: 0;
width: 100%;
height: 100%;
background: rgba(0, 0, 0, 0.5);
z-index: 999;
}
.overlay.active {
display: block;
}
This happens when you use a dependency that was built against .NET 4.0 while your project is configured for .NET 2.0–3.5.
For example, the dependency targets .NET Framework 4.0 while the project configuration is .NET 3.5.
I'm struggling to read a growing MXF file in real time for a live sports streaming project. I can read the video in 5-minute chunks provided by the recording software, and I'm able to load the full file (around 750GB) once it's complete. However, I need to process the file as it's still growing. Any suggestions on how to approach this?
How to add the Google Maps API to a web page:
<script
src="https://maps.googleapis.com/maps/api/js?key=YOUR_API_KEY&loading=async&libraries=maps&v=beta" defer>
</script>
https://developers.google.com/maps/documentation/javascript/add-google-map
How I felt when bro said "ancient":
You may try:
=LET(a,TOCOL(C7:E27,1),SORT(UNIQUE(ARRAYFORMULA(TOCOL(a,1)+TOCOL(ARRAYFORMULA(6-TOCOL(ARRAYFORMULA(WEEKDAY(a)),1)),1)))))
1. Build your project in release mode.
2. Copy the .exe from the release folder to the folder you want to deploy to, e.g. C:\path\to\folder\deployfolder.
3. Open the Qt tools prompt, for example: Qt 6.5.3 (MinGW 11.2.0 64-bit).
4. Run the command
windeployqt C:\path\to\folder\deployfolder and that's it.
Was the issue resolved? I am also facing the same problem after the Angular 18 migration. Please help.
I tried your code; it seems to be missing something and it has an error. I modified your Python and converted it into JavaScript, and added a forEach() to iterate over the data: if the ID matches an entry in the array, it returns that entry's crypto price in EUR, matching json.data[x].quote.EUR.price from your script.
Code.gs
function getCryptoData(id) {
var url = "https://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?convert=EUR";
var apiKey = 'xyz'; // Replace with your API key
var headers = {
"X-CMC_PRO_API_KEY": apiKey,
"Accept": "application/json"
};
var response = UrlFetchApp.fetch(url, { 'headers': headers });
var json = JSON.parse(response.getContentText());
var price = null;
json.data.forEach(x => {
if (x.id == id) {
console.log(x.name, x.quote.EUR.price);
price = x.quote.EUR.price; // a bare return inside forEach would not return from getCryptoData
}
});
return price;
}
@aled Thank you for your answer! I found it extremely helpful, even though I was barking up the wrong tree and asking the wrong questions. The solution I found was to add another ee:transform component after the wsc:consume component. I was also able to simplify my existing ee:transform so that I didn't have to map each individual element. The Add:\soapkit-config flow now looks like this:
<flow name="Add:\soapkit-config">
<ee:transform doc:name="Transform Input">
<ee:message>
<ee:set-payload><![CDATA[%dw 2.0
output application/xml
ns ns0 http://tempuri.org/
---
payload.body]]></ee:set-payload>
</ee:message>
</ee:transform>
<wsc:consume config-ref="Web_Service_Consumer_Config" operation="Add" doc:name="Outside Consumer" />
<ee:transform doc:name="Transform Output">
<ee:message>
<ee:set-payload><![CDATA[%dw 2.0
output application/xml
ns ns0 http://tempuri.org/
---
payload.body]]></ee:set-payload>
</ee:message>
</ee:transform>
</flow>
When I put logging components in, I can see that after the Transform Output, the payload looks like this:
<?xml version='1.0' encoding='UTF-8'?>
<AddResponse xmlns="http://tempuri.org/">
<AddResult>579</AddResult>
</AddResponse>
... then control flows back to the SOAP Router which puts the payload into a SOAP Body, so it looks like this:
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope">
<soap:Body>
<AddResponse xmlns="http://tempuri.org/">
<AddResult>579</AddResult>
</AddResponse>
</soap:Body>
</soap:Envelope>
Try changing local.settings.json:
"FUNCTIONS_WORKER_RUNTIME": "dotnet-isolated"
You can create profile-specific configuration files, like application-local.properties and application-sandbox.properties, where you define the logback configurations for "local" and "sandbox" profiles respectively.
Place the shared properties in application.properties, which will be loaded across all profiles. The application-local.properties and application-sandbox.properties files will inherit from application.properties and can override specific settings as needed.
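A minimal sketch of that layout (the file names and logback paths are illustrative):
# application.properties — shared by all profiles
spring.application.name=my-app

# application-local.properties — used when spring.profiles.active=local
logging.config=classpath:logback-local.xml

# application-sandbox.properties — used when spring.profiles.active=sandbox
logging.config=classpath:logback-sandbox.xml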
This post looks old so for newer users needing to update the client requests.
If using the most up to date spring security stuff, it will use a webclient in the backend to actual talk with the Reddit auth service. You will need to follow the instructions here https://docs.spring.io/spring-security/reference/reactive/oauth2/index.html#oauth2-client-customize-web-client, passing in the customized WebClient that sets the default header to the oauth clients to use when making the request.
Of course it was a problem with the lifecycle of my activity. When I use ViewTreeObserver.OnGlobalLayoutListener instead of .post, the coordinates are returned perfectly.
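For anyone hitting the same thing, a minimal Kotlin sketch of that approach (the view and log tag are illustrative); it goes after setContentView in onCreate:
view.viewTreeObserver.addOnGlobalLayoutListener(object : ViewTreeObserver.OnGlobalLayoutListener {
    override fun onGlobalLayout() {
        // Remove the listener so this runs only once, after the first layout pass
        view.viewTreeObserver.removeOnGlobalLayoutListener(this)
        val location = IntArray(2)
        view.getLocationOnScreen(location) // now returns the real on-screen coordinates
        Log.d("Layout", "x=${location[0]}, y=${location[1]}")
    }
})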
charlesparker@Charless-Mini KandR % ls -l /usr/bin/gcc
-rwxr-xr-x  77 root  wheel  119008 Aug  4 06:31 /usr/bin/gcc
charlesparker@Charless-Mini KandR % which cc
/usr/bin/cc
charlesparker@Charless-Mini KandR % ls -l /usr/bin/cc
-rwxr-xr-x  77 root  wheel  119008 Aug  4 06:31 /usr/bin/cc
charlesparker@Charless-Mini KandR % which clang
/usr/bin/clang
charlesparker@Charless-Mini KandR % ls -l /usr/bin/clang
-rwxr-xr-x  77 root  wheel  119008 Aug  4 06:31 /usr/bin/clang
Mac mini 2018, macOS Sonoma 14.6.1.
I credit most of the logic for this solution to @codtex and @stritch000. I noticed that the solution from @stritch000 uses the same adjustment rule for both the current-year and next-year lookups, which might produce incorrect results if the adjustment rules change after the first year.
Here is an updated solution that addresses the next-year lookup while also preserving the southern-hemisphere fix. It also uses LINQ. If you plan to run this for many dates, you could cache the results of the LINQ query for a given year/time zone in a dictionary.
public static DateTime? GetNextTransition(DateTime asOfTime, TimeZoneInfo timeZone)
{
var getAdjs = from adj in timeZone.GetAdjustmentRules()
from yr in (int[])[asOfTime.Year, asOfTime.Year + 1]
from t in (TimeZoneInfo.TransitionTime[])[adj.DaylightTransitionStart, adj.DaylightTransitionEnd]
where adj.DateStart.Year <= yr && adj.DateEnd.Year >= yr
select GetAdjustmentDate(t, yr);
if (getAdjs.Where(a => a > asOfTime).Any())
{
return getAdjs.Where(a => a > asOfTime).Min();
}
return null;
}
public static System.Globalization.Calendar cal = System.Globalization.CultureInfo.CurrentCulture.Calendar;
public static DateTime GetAdjustmentDate(TimeZoneInfo.TransitionTime transitionTime, int year)
{
if (!transitionTime.IsFixedDateRule)
{
int minDate = transitionTime.Week * 7 - 6; //1, 8, 15 ... This is the earliest date that works for transition.
var minDateDayOfWeek = cal.GetDayOfWeek(new DateTime(year, transitionTime.Month, 1)); //the day for minDate
int dayDiff = (transitionTime.DayOfWeek - minDateDayOfWeek + 7) % 7;
int transitionDay = minDate + dayDiff;
if (transitionDay > cal.GetDaysInMonth(year, transitionTime.Month))
transitionDay -= 7;
return new DateTime(year, transitionTime.Month, transitionDay, transitionTime.TimeOfDay.Hour, transitionTime.TimeOfDay.Minute, transitionTime.TimeOfDay.Second);
}
else
{
return new DateTime(year, transitionTime.Month, transitionTime.Day);
}
}
Turns out that I had a rogue tab related to a work project.
Normally, stopping the dev server closes all related tabs... except maybe it didn't, and that remaining tab kept polling for the server. So when I started a new server for a different project, the tab was polling on the same port and getting 404s back.
Try adding the ignore-scripts flag: npm i node-sass --ignore-scripts
You don't need the .tabViewStyle(.page(indexDisplayMode: .never)) modifier to achieve this; you can do it by binding selectedTab to the TabView.
struct ContentView: View {
@State var selectedTab = 0
var body: some View {
ZStack(alignment: .bottom) {
TabView(selection: $selectedTab) {
HomeView(page: 0)
.tag(0)
HomeView(page: 1)
.tag(1)
HomeView(page: 2)
.tag(2)
HomeView(page: 3)
.tag(3)
HomeView(page: 4)
.tag(4)
}
RoundedRectangle(cornerRadius: 25)
.frame(width: 350, height: 70)
.foregroundColor(.white)
.shadow(radius: 0.8)
HStack {
ForEach(0..<5, id: \.self) { index in
Button {
selectedTab = index
} label: {
CustomTabItem(imageName: "cross", title: "Item \(index)", isActive: (selectedTab == index))
}
}
}
.padding(.horizontal, 30)
.frame(height: 70)
}
}
@ViewBuilder func CustomTabItem(imageName: String, title: String, isActive: Bool) -> some View{
VStack(alignment: .center) {
HStack(alignment: .center) {
Spacer()
Image(systemName: imageName)
.resizable()
.renderingMode(.template)
.foregroundColor(isActive ? .purple : .gray)
.frame(width: 25, height: 25)
Spacer()
}
Text(title)
.foregroundColor(isActive ? .purple : .gray)
}
}
}
struct HomeView: View {
var page: Int
var body: some View {
NavigationView {
VStack {
NavigationLink(destination: HomeDetailView()) {
Text("Tab \(page) is selected")
}
}
.background(.red)
}
.navigationViewStyle(.stack)
}
}
struct HomeDetailView: View {
var body: some View {
Rectangle()
.fill(.orange)
}
}
May I add an additional scenario to discuss in this thread?
1. Take snapshot A of the EBS volume.
2. Add some data/files, changing previously empty blocks on the EBS volume.
3. Delete/remove that data/those files from the EBS volume.
4. Take snapshot B of the EBS volume.
Are the blocks changed in step 2 treated as empty again and excluded from snapshot B, or are they considered changed and included in snapshot B even though they are now empty?
There's a simpler solution, I believe, that I found in this Medium article: https://kulembetov.medium.com/preventing-flash-of-unstyled-content-fouc-in-next-js-applications-61b9a878f0f7
Basically, it consists of adding visibility: hidden; to the body element by default and then restoring the visibility client-side once the main layout has mounted (for instance: document.body.classList.add('body-visible');).
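A minimal sketch of that approach, assuming a Next.js client component and global CSS containing body { visibility: hidden } and body.body-visible { visibility: visible }:
'use client';
import { useEffect } from 'react';

export default function RevealBody() {
  useEffect(() => {
    // Once the layout has mounted on the client, reveal the body again
    document.body.classList.add('body-visible');
  }, []);
  return null;
}
Render this component once inside the root layout so the class is added right after hydration.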
If you are using NewtonsoftJson, make sure to add a NuGet package reference to Microsoft.AspNetCore.Mvc.NewtonsoftJson as well.
I am also on Windows. I use --force-polling and it works; I got the solution from this GitHub issue.
This flag forces polling rather than inotify, so note that it is more CPU-intensive.
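For example, with a Bundler-managed Jekyll site that would be:
bundle exec jekyll serve --force-polling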
I faced this issue and was able to resolve it by updating my TypeScript version; I'm currently using "typescript": "^5.6.3". Try that and see if it works. Good luck!
It is unprofessional that only 15 days have passed between the announcement of the deprecation of the functionality and its removal.
You need the Pimoroni MicroPython build to work with the GFX Pack.
I can only use the REST API to pass instance_config and exclude fields; it is still not working with the Python API. I would appreciate someone's help, as there are no code samples for this feature from Vertex AI.
Go to: Control Panel -> Credential Manager -> Generic Credentials,
choose the edit option, and update your "User name" and "Password".
Check if you have MySQL Server installed and not just MySQL Workbench.
I have less than 50 reputation, but I need to ask SUBHRA SANKHA if he was able to find a way around this problem.
Note: You can help. This issue has been filed as Gerrit bug 40015217. Please +1 it and upvote this question to help it get the attention it deserves.
File rename operations in a git commit are identified by computing the similarity (percentage of lines unchanged) of pairs of files in the commitāone that was deleted and one that was added. If the similarity exceeds some threshold, the file deletion and file addition together are considered a file rename instead. Gitās default similarity threshold for rename detection is 50%. This is clearly documented and Google knows it well.
Gerrit uses JGit, and for some reason its similarity threshold for rename detection is 60% and has been since at least 2010 (commit 978535b). What's more, the threshold isn't customizable. jgit diff has a -M option for detecting renames, but it doesn't accept a custom threshold.
Here are some of the problems that Gerrit users face as a result of Gerrit using a different rename detection threshold than git:
Authors can usually work around this issue by splitting a file rename + edits into multiple commits. In some compiled languages like Java, file renames usually require some edits (such as to the package line and/or class name) for the file to continue compiling. The minimum amount of edits required to appease the compiler usually keeps a file's similarity index over 60%, though. Other edits can come in separate (prior or follow-up) commits.
Authors may not realize, however, that Gerrit won't properly identify their file rename until after they've prepared the commit, written a commit title and description, and uploaded it for review. Their local git installation and git tools (such as IntelliJ's git integration, for example) properly identified the file rename before they uploaded the commit to Gerrit. At this point, reworking the commit to work around a Gerrit limitation may have significant time cost, since the commit may have several more on top of it, and all of them might need to be rebased and have acceptance tests run again as a result. In short, the workaround may not always be quick and low-cost.
This issue has been reported and discussed in several other places.
It seems that BitBucket (or Stash) recently made the copy/rename similarity thresholds configurable (BSERV-3249). They use git rather than JGit.
IntelliJ/IDEA (a Java application) once tried using JGit for its git plugin, but concluded in this IJPL-88784 comment (emphasis added):
My comments given 18 months ago are still valid. JGit is still below Git. Moreover, you might be surprised, but we actually gave JGit a try: we used it in IDEA 11.X and 12.0 for HTTP remote operations. Our users got a lot of problems that were not easy to fix or even to reproduce, so we've rolled back to native Git. If other projects are happy with JGit, that's their funeral, we have our own vision on the subject.
So it is not because we are lazy to rewrite the plugin. We just don't want to fix issues for the JGit team, and on the other hand we can't say users "blame JGit" if they experience problems with IDEA which they don't have in the command line: they will still blame IDEA, because they don't care which library do we use inside.
I've solved it by installing ipywidgets:
pip install ipywidgets
In my case export DOCKER_HOST="unix://${HOME}/.rd/docker.sock" also worked. If it works for you, be sure to add it to your shell startup. There is no need to enable administrative access in Rancher Preferences. Apparently the default DOCKER_HOST is /var/run/docker.sock.
Check the redirect URI in the Google API console and try updating your Expo configuration.
If you are using MIUI, turn off MIUI optimization: Settings > Developer options > MIUI optimization.
The ElastiCache service is designed to be accessed exclusively from within AWS.
If you want to access it from your local machine, the easiest and cheapest way is to use AWS SSM Start-Session with port forwarding (https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-sessions-start.html#sessions-remote-port-forwarding).
First install the Session Manager plugin for the AWS CLI (https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html).
Log in to AWS via the CLI and run:
aws ssm start-session \
--target instance-id \
--document-name AWS-StartPortForwardingSessionToRemoteHost \
--parameters '{"host":["redis-host.us-east-2.elasticache.amazonaws.com"],"portNumber":["6379"], "localPortNumber":["6379"]}'
Then you can access your Redis on localhost:6379. Make sure to test first with TLS disabled.
That was a false-positive case and it's already resolved: https://github.com/detekt/detekt/issues/3145
As an enhancement, the check could look at the actual byte code generated. If the spread operator leads to a new array instantiation, the spread operator is a performance issue and should be reported. If the spread operator passes through an existing array, the spread operator is not a performance issue and should not be reported.
At my company we just started using .jenkinsfile as an extension: build.jenkinsfile, deploy.jenkinsfile, and so forth. It's worked well, and our IDEs are able to parse the syntax just fine.
I had the same problem today. Try opening the resource file with the legacy editor and adding the files there: right-click the resource file => Open With => ... (Legacy). Hope this helps.
There's a new feature in Terraform 1.9 that can help here: a variable validation can now refer to another variable. See https://github.com/hashicorp/terraform/blob/v1.9/CHANGELOG.md#190-june-26-2024
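A minimal sketch of what that enables (the variable names and the rule itself are illustrative, not from the question):
variable "environment" {
  type = string
}

variable "instance_count" {
  type = number

  validation {
    # Since Terraform 1.9 the condition may reference other input variables
    condition     = var.environment != "prod" || var.instance_count >= 2
    error_message = "Production environments need at least 2 instances."
  }
}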
Events are fired if you call delete
on each model, not on the builder.
//events not fired
Submission::query()->delete();
//events fired
Submission::query()->get()->each->delete();
Live code example in laravel sandbox: https://sandbox.ws/en/shared/e0eadb68-f145-46a2-9981-188ee3c34e8a
People, why so complicated?
const ExtractDomain = url => url.includes('/') ? url.split('/')[2] : '';
I'm starting to think it's something global.
I started having this problem 2 or 4 days ago. Nothing changed in my app; however, today when I tried to do an npm run build, I got the same thing.
SyntaxError: Unexpected identifier '#Y'.
at wrapSafe (node:internal/modules/cjs/loader:1427:18)
at Module._compile (node:internal/modules/cjs/loader:1449:20)
at Module._extensions.js (node:internal/modules/cjs/loader:1588:10)
at Module.load (node:internal/modules/cjs/loader:1282:32)
at Module._load (node:internal/modules/cjs/loader:1098:12)
at TracingChannel.traceSync (node:diagnostics_channel:315:14)
at wrapModuleLoad (node:internal/modules/cjs/loader:215:24)
at Module.require (node:internal/modules/cjs/loader:1304:12)
at mod.require (E:\git-challenge-traveler-visitor-visitant_modules_modules_next_distress-server-require-hook.js:65:28)
in require (node:internal/modules/helpers:123:16)
> A compilation error has occurred
Error: Failed to collect page data for /api/dashboard/create-company
at E:\git-challenge-traveler-visitant āclientā modules ānextdistinctā.js:1268:15
in process.processTicksAndRejections (node:internal/process/task_queues:95:5) {
type: 'Error'
}
I tried doing the build on past commits where the site was working fine, and it's still the same. Does anyone know where one could report such an error? Probably the Vercel forum, I think; however, I was asked what I had done, and the truth is: nothing.
I was insanely jealous when Van Jacobson of LBL used my kernel ICMP support to write TRACEROUTE, by realizing that he could get ICMP Time-to-Live Exceeded messages when pinging by modulating the IP time-to-live (TTL) field. I wish I had thought of that! :-) Of course, the real traceroute uses UDP datagrams because routers aren't supposed to generate ICMP error messages for ICMP messages.
It is working in the live environment; be sure that you visit the link http://localhost/opencart/index.php?route=custom/simplejson without any .php extension at the end.
Problem solved. For those who need it or have a similar problem, I share the code. Thanks to everyone for commenting and responding.
<?php
include("conexion.php"); // Asumiendo que tu conexión estĆ” configurada aquĆ
require __DIR__ . "/vendor/autoload.php";
$objCon = new Conexion();
use PhpOffice\PhpSpreadsheet\Spreadsheet;
use PhpOffice\PhpSpreadsheet\IOFactory;
use PhpOffice\PhpSpreadsheet\Cell\Coordinate;
use Monolog\Level;
use Monolog\Logger;
use Monolog\Handler\StreamHandler;
use Monolog\Handler\FirePHPHandler;
// Basic configuration to display all errors
error_reporting(E_ALL);
ini_set('display_errors', 'On');
// Saving tipo_actividad and id_reporte
$tipo_Actividad=$_POST['tipo_actividad'];
$id_reporte = $_POST['id_reporte'];
class Registro
{
public $interno;
public function __construct($data)
{
$this->interno = [
'doc_identidad' => $data['documento_identidad'],
'datos_adicionales' => [
'nombre_completo' => $data['apellidos_nombres_interno'],
'fecha_ingreso' => $data['fecha_ingreso'],
'fecha_nac' => $data['fecha_nacimiento'],
'discapacidad' => $data['discapacidad'],
'planificacion_intervencion' => $data['planificacion_intervencion'],
'tipo_intervencion' => $data['tipo_intervencion'],
'grupo_especifico' => $data['grupo_especifico'],
'otros_relevantes' => $data['otros_relevantes'],
'regimen' => $data['regimen'],
'etapa' => $data['etapa'],
'pabellon' => $data['pabellon'],
'descripcion' => mb_convert_encoding($data['descripcion'], "UTF-8"),
'dia' => $data['dia'],
'nombre_sesion' => mb_convert_encoding($data['nombre_sesion'], "UTF-8"),
'profesional' => $data['datos_profesional'],
'observaciones' => $data['observaciones']
// ... otros datos adicionales
]
];
// $this->nombre_completo = $data['apellidos_nombres_interno'];
}
}
class GeneradorReporteExcel
{
private $spreadsheet;
private $sheet;
private $logger;
public function __construct()
{
$this->spreadsheet = new Spreadsheet();
$this->sheet = $this->spreadsheet->getActiveSheet();
//Logs
$this->logger = new Logger('my_app');
$this->logger->pushHandler(new StreamHandler(__DIR__ . '/reportes/debug.log', Level::Debug));
// $this->logger->info('My logger is now ready');
$this->logger->pushHandler(new FirePHPHandler());
}
private function obtenerValor($registro, $columna, $fila, $tipoActividad)
{
switch ($columna) {
case 'C':
return $registro->interno['doc_identidad'];
case 'D':
return $registro->interno['datos_adicionales']['nombre_completo'];
case 'E':
return 'MASCULINO';
case 'F':
return $registro->interno['datos_adicionales']['fecha_ingreso'];
case 'G':
return $registro->interno['datos_adicionales']['fecha_nac'];
case 'H':
// Get the date-of-birth cell (assuming the 'G' reference is relative)
$celdaFechaNacimiento = 'G' . $fila; // Adjust the row according to your logic
// Build the full formula
$formula = "=(NOW()-" . $celdaFechaNacimiento . ")/365-0.5";
return $formula;
case 'I':
return $registro->interno['datos_adicionales']['discapacidad'];
case 'J':
//planificacion d ela intervencion
if ($registro->interno['datos_adicionales']['planificacion_intervencion'] == 1) {
$planificacion_intervencion = "PTI_EN_PROCESO";
}
if ($registro->interno['datos_adicionales']['planificacion_intervencion'] == 2) {
$planificacion_intervencion = "PTI_P";
}
if ($registro->interno['datos_adicionales']['planificacion_intervencion'] == 3) {
$planificacion_intervencion = "PTI_S";
}
if ($registro->interno['datos_adicionales']['planificacion_intervencion'] == 4) {
$planificacion_intervencion = "POPE_ANTIGUA";
}
return $planificacion_intervencion;
case 'K':
//tipo de intervencion
if ($registro->interno['datos_adicionales']['tipo_intervencion'] == 1) {
$tipo_intervencion = "INDUCCION_Y_ADAPTACION_AL_REGIMEN_PENITENCIARIO";
}
if ($registro->interno['datos_adicionales']['tipo_intervencion'] == 2) {
$tipo_intervencion = "INTERVENCION_GENERAL";
}
if ($registro->interno['datos_adicionales']['tipo_intervencion'] == 3) {
$tipo_intervencion = "INTERVENCION_ESPECIALIZADA";
}
if ($registro->interno['datos_adicionales']['tipo_intervencion'] == 4) {
$tipo_intervencion = "PROGRAMACIĆN_SOBRE_NECESIDADES_DE_INTERVENCIĆN_COMPLEMENTARIA";
}
return $tipo_intervencion;
case 'L':
return $registro->interno['datos_adicionales']['grupo_especifico'];
case 'M':
return $registro->interno['datos_adicionales']['otros_relevantes'];
case 'N':
return $registro->interno['datos_adicionales']['regimen'];
case 'O':
return $registro->interno['datos_adicionales']['etapa'];
case 'P':
return $registro->interno['datos_adicionales']['pabellon'];
case 'Q':
case 'S':
case 'U':
case 'W':
case 'Y':
case 'AA':
case 'AC':
if ($tipoActividad === 1) {
$descripcion=$registro->interno['datos_adicionales']['descripcion']; // O el campo de descripción que corresponda
} else {
$descripcion='';
}
return $descripcion;
case 'R':
case 'T':
case 'V':
case 'X':
case 'Z':
case 'AB':
case 'AD':
if ($tipoActividad === 1) {
$dia=$registro->interno['datos_adicionales']['dia']; // O el campo de descripción que corresponda
} else {
$dia='';
}
return $dia;
case 'AE':
case 'AG':
case 'AI':
case 'AK':
case 'AM':
if ($tipoActividad == 2) {
// Manejar el caso cuando tipoActividad es 2 (si aplica)
$nombre_sesion=$registro->interno['datos_adicionales']['nombre_sesion']; // O el campo de nombre de sesión que corresponda
} else {
$nombre_sesion='';
}
return $nombre_sesion;
case 'AF':
case 'AH':
case 'AJ':
case 'AL':
case 'AN':
if ($tipoActividad == 2) {
// Manejar el caso cuando tipoActividad no es 1 ni 2
$dia=$registro->interno['datos_adicionales']['dia']; // O el campo de nombre de sesión que corresponda
} else {
$dia='';
}
return $dia;
case 'AO':
return $registro->interno['datos_adicionales']['profesional'];
case 'AP':
return $registro->interno['datos_adicionales']['observaciones'];
// case 'AQ':
// return $registro->interno['datos_adicionales']['nombre_completo'];
default:
return '';
}
}
private function escribirEnCelda($columna, $fila, $valor)
{
// $this->logger->debug("Escribiendo $valor en la celda $columna$fila");
// $this->sheet->setCellValue($columna . $fila, $valor);
try {
// $this->logger->info('My logger is now ready');
// $this->logger->debug("Escribiendo $valor en la celda $columna$fila");
$this->sheet->setCellValue($columna . $fila, $valor);
} catch (\Exception $e) {
$this->logger->error("Error al escribir en la celda: " . $e->getMessage());
}
}
private function getRangoColumnas($tipoActividad, $esNuevoRegistro)
{
if($tipoActividad==1){
return $esNuevoRegistro ? ['C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD'] : ['O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'AA', 'AB', 'AC', 'AD'];
}else{
return $esNuevoRegistro ? ['C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'AE', 'AF', 'AG', 'AH','AI','AJ','AK','AL','AM','AN','AO','AP'] : ['AE', 'AF', 'AG', 'AH','AI','AJ','AK','AL','AM','AN'];
}
}
private function buscarFilaPorDni($dni)
{
// Suponiendo que el DNI estĆ” en la columna C
$highestRow = $this->sheet->getHighestRow();
// $this->logger->debug("Cuantas filas hay?: $highestRow");
for ($row = 2; $row <= $highestRow; $row++) {
if ($this->sheet->getCell('C' . $row)->getValue() == $dni) {
// $this->logger->debug("Fila del archivo Excel: $this->sheet->getCell('C' . $row)->getValue() == DNI: $dni");
$tipoActividad = 2; //$this->sheet->getCell('A' . $row)->getValue(); // Suponiendo que el tipo de actividad estĆ” en la columna A
$rangoColumnas = $this->getRangoColumnas($tipoActividad, false);
return [
'fila' => $row,
'rangoColumnas' => $rangoColumnas
];
}
// $this->logger->debug("DNI EXCEL: $this->sheet->getCell('C' . $row)->getValue() NO ES IGUAL A DNI SQL: $dni");
}
return false;
}
private function crearNuevaFila($fila, $registro, $rangoColumnas, $tipoActividad,$esNuevoRegistro) {
$columnaIndex = 0; // Initialize the column index
foreach ($rangoColumnas as $columna) {
if($tipoActividad == 2 && $esNuevoRegistro == true){
if($columnaIndex==16 || $columnaIndex==17 || $columnaIndex==18 || $columnaIndex==19 || $columnaIndex==20 || $columnaIndex==21 || $columnaIndex==22){
// $this->logger->debug("columnaIndex: $columnaIndex - Pertenece a la columna: $columna ---> NO SE REGISTRO");
continue;
}
}else{
}
$valor = $this->obtenerValor($registro, $columna, $fila, $tipoActividad);
$this->escribirEnCelda($columna, $fila, $valor);
// $this->logger->debug("columnaIndex: $columnaIndex - Pertenece a la columna: $columna ---> valor = $valor ");
$columnaIndex++;
}
}
private function actualizarFila($fila, $registro, $rangosAdicionales)
{
$tipoActividad = 2; //$registro->tipo_actividad;
// Encontrar la primera columna vacĆa en el rango adicional
$columnaVacia = null;
// $this->logger->debug("Rango: $rangosAdicionales[$tipoActividad]");
foreach ($rangosAdicionales as $columna) {
if ($this->sheet->getCell( $columna. $fila)->getValue() === null) {
$columnaVacia = $columna;
// $this->logger->debug("Columna Vacia: $columnaVacia");
break;
}
}
// Si se encontró una columna vacĆa, escribir el valor
if ($columnaVacia) {
// $columnaIndex = array_search($columnaVacia, $rangosAdicionales);
// $this->logger->debug("ColumnaIndex: $columnaIndex ");
$sesion =$registro->interno['datos_adicionales']['nombre_sesion']; // $this->obtenerValor($registro, $columna);
$this->escribirEnCelda($columnaVacia, $fila, $sesion);
//aumentando en 1 para el dĆa
$columnaVacia++;
$dia=$registro->interno['datos_adicionales']['dia'];
$this->escribirEnCelda($columnaVacia, $fila, $dia);
}
}
public function generarReporte($registros, $rutaArchivo, $formatoArchivo,$tipoActividad)
{
// Cargar el archivo de formato
$this->spreadsheet = IOFactory::load($formatoArchivo);
$this->sheet = $this->spreadsheet->getActiveSheet();
$fila = 2;
foreach ($registros as $registro) {
$tipoActividad = $tipoActividad; //$registro->datos_adicionales['tipo_intervencion'];
// Determinar si es un nuevo registro o no ---> OK!
if ($this->buscarFilaPorDni($registro->interno['doc_identidad'])) {
$rangoColumnas = $this->getRangoColumnas($tipoActividad, false); // Registro existente
//hayando la fila repetida
// $dni = $registro->interno['doc_identidad'];
$resultadoBusqueda=$this->buscarFilaPorDni($registro->interno['doc_identidad'])['fila'];
// $this->logger->debug("FILA REPETIDA: ".$resultadoBusqueda);
// if (is_array($resultadoBusqueda)) {
// $filaExistente = $resultadoBusqueda[0];//$this->buscarFilaPorDni($dni)[0];
// $this->logger->debug("FILA REPETIDA: $filaExistente");
// }
$this->actualizarFila($resultadoBusqueda, $registro, $rangoColumnas);
// $this->logger->debug("BuscarFilaDNI devuelve estos valores: ".json_encode($this->buscarFilaPorDni($registro->interno['doc_identidad'])));
} else {
$rangoColumnas = $this->getRangoColumnas($tipoActividad, true); // Nuevo registro
$this->crearNuevaFila($fila, $registro, $rangoColumnas, $tipoActividad,true);
$fila++;
// $this->logger->debug("BuscarFilaDNI devuelve estos valores: ".$this->buscarFilaPorDni($registro->interno['doc_identidad']));
}
}
// Guardar el archivo Excel
$writer = IOFactory::createWriter($this->spreadsheet, 'Xlsx');
$writer->save($rutaArchivo);
}
}
// SQL query
$sql = "SELECT I.doc_identidad AS 'DOCUMENTO_IDENTIDAD',
I.ape_nombres AS 'APELLIDOS_NOMBRES_INTERNO',
I.fecha_ingreso AS 'FECHA_INGRESO',
I.fecha_nacimiento AS 'FECHA_NACIMIENTO',
I.discapacidad AS 'discapacidad',
DET.planificacion_intervencion AS 'PLANIFICACION_INTERVENCION',
DET.tipo_intervencion AS 'TIPO_INTERVENCION',
GRUP.nombre_grupo_especifico AS 'GRUPO_ESPECIFICO',
DET.otros_relevantes AS 'OTROS_RELEVANTES',
REG.nombre_regimen AS 'REGIMEN',
ETP.nombre_etapa AS 'ETAPA',
I.pab_celda_etapa AS 'PABELLON',
DESCRIP.nombre_descripciones AS 'DESCRIPCION',
DET.dia_actividades AS 'dia',
SES.nombre_sesiones AS 'nombre_sesion',
CONCAT(U.ape_paterno,' ',U.ape_materno,' ',U.nombres) AS 'DATOS_PROFESIONAL',
DET.observaciones AS 'OBSERVACIONES',
DET.cambio_PTIP_PTIS AS 'PTI_P_PTI_S'
FROM reportes_mensuales REP
INNER JOIN detalle_reporte_mensual DET ON REP.id_reporte_mensual=DET.id_reporte_mensual
INNER JOIN grupo_especifico GRUP ON DET.id_grupo_especifico=GRUP.id_grupo_especifico
INNER JOIN internos I ON DET.id_interno=I.id_interno
INNER JOIN usuarios U ON REP.id_usuario=U.id_usuario
INNER JOIN regimen REG ON DET.id_regimen=REG.id_regimen
INNER JOIN etapa ETP ON DET.id_etapa=ETP.id_etapa
INNER JOIN descripciones DESCRIP ON DET.id_descripciones=DESCRIP.id_descripciones
INNER JOIN sessiones SES ON DET.id_sesiones=SES.id_sesiones
WHERE REP.id_reporte_mensual=:id_reporte"; // Tu consulta SQL completa
$rsDetalleReporte = $objCon->getConexion()->prepare($sql);
$rsDetalleReporte->bindParam(':id_reporte', $id_reporte, PDO::PARAM_INT);
if ($rsDetalleReporte->execute()) {
$registros = [];
while ($row = $rsDetalleReporte->fetch(PDO::FETCH_ASSOC)) {
$registro = new Registro($row);
$registros[] = $registro;
// Imprimir los datos del registro para verificar
// echo json_encode($registro)."<br>";
}
// Check whether the $registros array is empty
if (empty($registros)) {
echo "No se encontraron registros.";
} else {
// Generar el reporte
$generador = new GeneradorReporteExcel();
$generador->generarReporte($registros, 'reportes/reporte_actualizado.xlsx', 'reportes/FORMATO_SOCIAL.xlsx',$tipo_Actividad);
}
} else {
// Handle errors in the query execution
echo "Error al ejecutar la consulta: " . $rsDetalleReporte->errorInfo()[2];
}
?>
I've already done it on my own, using another library: spaCy.
My code:
import spacy
# Load the spaCy model for Russian
nlp = spacy.load("ru_core_news_sm")
# Run spaCy over the text
doc = nlp("Какая погода в Москве?")  # "What's the weather in Moscow?"
# Print all recognized entities
for ent in doc.ents:
    print(ent.text, ent.label_)
for /f "tokens=2 delims=:" %%a in ('findstr "lines:" .\reports\publish\coverage.xml') do set lines_coverage=%%a
for /f "tokens=2 delims=:" %%a in ('findstr "functions:" .\reports\publish\coverage.xml') do set functions_coverage=%%a
for /f "tokens=2 delims=:" %%a in ('findstr "branches:" .\reports\publish\coverage.xml') do set branches_coverage=%%a
:: Remove whitespace characters and check the values
set lines_coverage=!lines_coverage: =!
set functions_coverage=!functions_coverage: =!
set branches_coverage=!branches_coverage: =!
:: Check if coverage is greater than 90% for lines, functions, and branches
for /f "delims=." %%a in ("!lines_coverage!") do set lines_coverage_int=%%a
for /f "delims=." %%a in ("!functions_coverage!") do set functions_coverage_int=%%a
for /f "delims=." %%a in ("!branches_coverage!") do set branches_coverage_int=%%a
if !lines_coverage_int! geq 90 (
echo Lines coverage is !lines_coverage!%%.
) else (
echo Lines coverage is below 90%%: !lines_coverage!%%
goto :fail
)
This is my code. Are there any issues with the if and else statements?
Subscription policies are defined for specific subscriptions, that is, between a specific application and an API. If you want to restrict access to a particular API after a certain number of requests, you may configure an advanced throttling policy from the Admin portal and add restrictions for your API from the Publisher portal.
Yes, in Shopify, you can update the password for a customer using the Storefront API but not directly via the Admin API. This process requires sending a password reset email to the customer, where they can update their password themselves. Shopify does not provide direct access to customer passwords through the Admin API due to security concerns.
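For example, the Storefront API exposes a customerRecover mutation that triggers that reset email (the email address below is a placeholder):
mutation {
  customerRecover(email: "customer@example.com") {
    customerUserErrors {
      code
      field
      message
    }
  }
}
If customerUserErrors comes back empty, Shopify has queued the password reset email for that customer.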
Unfortunately you have to use and pay for IntelliJ IDEA Ultimate to have code completion for Node.js, as you can see in the comparison of IntelliJ IDEA Ultimate and IntelliJ IDEA Community Edition: https://www.jetbrains.com/products/compare/?product=idea&product=idea-ce
In the IntelliJ IDEA Community Edition code completion for Node.js and JavaScript is not supported.
In my case, in application.properties
changing spring.security.oauth2.client.registration.google.scope=openid, profile, email
to spring.security.oauth2.client.registration.google.scope=profile, email
solved the problem for me.
Debian 12
apt install libmagickcore-6.q16-dev
To access protected members of a class you need to use the keyword this
as shown in the TypeScript documentation.
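A small sketch of what that looks like (the class names are made up):
class Counter {
  protected count = 0;

  increment(): void {
    this.count += 1; // protected members are accessible via `this` inside the class
  }
}

class LoggingCounter extends Counter {
  report(): string {
    return `count is ${this.count}`; // ...and inside subclasses as well
  }
}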
You can handle global styling with SafeAreaProvider. To manage safe areas across your entire app, use the https://www.npmjs.com/package/react-native-safe-area-context package, which provides the SafeAreaProvider and SafeAreaView components, and wrap your entire app with SafeAreaProvider in your root component.
The purpose of SafeAreaView
is to render content within the safe area boundaries of a device.
So if you want to apply the background color everywhere, you may do this instead :
<View style={{ flex: 1, backgroundColor: "olive" }}>
<SafeAreaView style={{ flex: 1 }}>
{/* ... */}
</SafeAreaView>
</View>
Since your function is returning an object, return {'--bg-color': props.bgm}, it seems that this exact object is being passed as the backgroundImage value. The background-image property cannot consume this value.
Try returning a correct CSS background-image value of type string, as per the MDN documentation.
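For instance, a sketch assuming bgm holds an image URL (the exact binding depends on your framework):
// Return a string that is a valid background-image value, not a custom-property object
const backgroundStyle = (bgm) => ({ backgroundImage: `url(${bgm})` });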
Why do you need to launch two PM2 instances?
PM2 can start processes with different Node versions.
See my response to this question: https://stackoverflow.com/a/73266114/8807231
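As a sketch, a single PM2 daemon can run apps under different Node binaries via the interpreter option (the app names and paths are illustrative):
// ecosystem.config.js
module.exports = {
  apps: [
    { name: 'legacy-api', script: './legacy/index.js', interpreter: '/usr/local/nvm/versions/node/v14.21.3/bin/node' },
    { name: 'new-api', script: './new/index.js', interpreter: '/usr/local/nvm/versions/node/v20.11.0/bin/node' },
  ],
};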
The solution that works for me is: go to each submodule's pom file, right-click, Run Maven -> Reimport (this could take a while).
This seems to work. I am unsure why my PivotField names and items are wrapped in [].
Sub Filter_Single_Item()
Dim pt As PivotTable, pf As PivotField, ws As Worksheet
Set ws = ThisWorkbook.Worksheets("Sheet1")
Set pt = ws.PivotTables("pvt_1")
Set pf = pt.PivotFields("[Table1].[filter1].[filter1]")
pf.ClearAllFilters
pf.VisibleItemsList = Array("[Table1].[filter1].&[item1]")
End Sub
You need to enable Keycloak integration by adding -Dhawtio.keycloakEnabled=true to the JAVA_ARGS in artemis.profile.
See more details: JMS Security Keycloak Example
IntelliSense is not supported for inline JavaScript in Visual Studio Code. There has been an open issue for this since 2017: https://github.com/microsoft/vscode/issues/26338
And a PR which would resolve it (from January 2023): https://github.com/microsoft/vscode/pull/171547
So this would be the reason why the variable defined in your inline JavaScript is not found in another JavaScript file.
I suggest looking at this other thread, which has some other good answers that may help you.
To close this out: I spoke to AWS Support, and as of Feb 2024 this is not supported by Glue Schema Registry but is on the roadmap.
If you want to do imports, you will have to use a different schema registry; otherwise you have to bake in everything you want to import.
This might be caused by wrong folder permissions. In my case I needed to give 775 to my image folders in the photos folder:
chmod -R 775 photos/*
I agree with @guillaume blaquiere, Cloud Source repository 2nd gen does not exist.
This is the documentation with regards to Cloud Build repositories (2nd gen).
With Cloud Build repositories (2nd gen), you can create and manage repository connections programmatically. You can set up a single connection for a repository and use Secret Manager secrets from that connection to programmatically set up additional connections across regions and projects. You can also set up connections using Terraform, in addition to the Google Cloud console, gcloud command-line tool, and the API. You must create a host connection prior to linking repositories when using Cloud Build repositories (2nd gen).
Cloud Build repositories (2nd gen) can be used with the following providers:
You can invoke builds on commits and pull requests. You can also invoke builds manually, on a Pub/Sub topic, or on an incoming webhook event.
In case anyone is still looking for this today, check out Ferrostar. We've learned a lot about how (not) to build apps and SDKs over the last decade. With Ferrostar, we're trying to optimize for extensibility (every navigation use case is slightly different) and composability (you can swap out just about everything from core behaviors like off-route detection all the way up to the UI).
I think you are using wrap_content for the CardView height, and that's why; you are also using a CardView as the main layout.
My situation when I ran into this problem is different than other answers. All my settings were user settings. However, I wanted the default values to be empty or null so I gave none of the properties a default value.
The save didn't work until I did give them a default value.
@Mohamad's answer is on point, but if you want a smoother animation and a shorter movement, try this:
.box {
transition: all .3s ease-in-out;
}
.box:hover {
transform: translateY(-5px);
}
Step 4.2 in this guide: https://apim.docs.wso2.com/en/latest/install-and-setup/setup/setting-up-proxy-server-and-the-load-balancer/configuring-the-proxy-server-and-the-load-balancer/#step-4-configure-the-dynamic-callback-origin
The default value of "forwardedHeader" is "X-Forwarded-For"; it needs to be changed to "X-Forwarded-Host".
SELECT * FROM test_table WHERE primary_id = 1 FOR UPDATE;
If you copy a project with its venv folder from another computer, you must change VIRTUAL_ENV in some files and correct any absolute paths left over from the other system (see the sketch after this list):
1. pyvenv.cfg: in the root of the venv folder
2. activate.bat: venv/Scripts
3. Activate.ps1: venv/Scripts
In Activate.ps1 you may not need any change.
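A sketch of the kind of edits involved (the paths are examples, not the real ones):
In venv/pyvenv.cfg, point home at the Python installation on this machine:
home = C:\Users\me\AppData\Local\Programs\Python\Python312
In venv/Scripts/activate.bat, fix the absolute project path:
set "VIRTUAL_ENV=C:\projects\myproject\venv"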
I had generated my cert with an EC key and Azure didn't like it. The file format was correct but nothing worked until I regenerated with an RSA key.
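If anyone needs it, a sketch of regenerating with an RSA key using OpenSSL (the subject and file names are placeholders):
openssl req -x509 -newkey rsa:2048 -keyout key.pem -out cert.pem -days 365 -nodes -subj "/CN=example.com"
openssl pkcs12 -export -out cert.pfx -inkey key.pem -in cert.pem
The second command packages the certificate and key as a PFX, a common format for uploading certificates to Azure.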
Don't use
FROM nginx:latest
Use this instead:
FROM debian:latest
and
&& apt-get install -y nginx \
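Putting it together, a minimal sketch of such a Dockerfile (assuming you just want nginx running in the foreground):
FROM debian:latest
RUN apt-get update \
 && apt-get install -y nginx \
 && rm -rf /var/lib/apt/lists/*
EXPOSE 80
CMD ["nginx", "-g", "daemon off;"]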
With recent seaborn versions, the fig attribute is deprecated; use figure instead.
df = sns.load_dataset('iris')
sns_plot = sns.pairplot(df, hue='species', height=2.5)  # 'size' was renamed to 'height' in newer seaborn
fig = sns_plot.figure
fig.savefig('file.jpg')
You can update your stack's screen options:
screenOptions={{ navigationBarColor: 'white' }}> // update color
I am still facing the same error the poster faced. I have tried the solutions provided after the post, but I still get the cmdline error.
I'd like to follow up on Matt Warrick's response by saying that VSCode marked my requirements.txt file itself as UTF-16 (by auto-guessing). You can see how VSCode decodes the file by checking this section at the IDE's bottom-right corner:
Did you resolve the issue? I have the same issue for iOS only; Android works fine.
I struggled so much with the same issue. The thing is, with Expo there's no need for a NativeContainer element; just <Drawer.Navigator /> is enough, no need to wrap it inside a NativeContainer.
This issue happens when the async operation is not handled properly.
Click on 'More options' for the AVD device and then click on the bug report menu item. It will start collecting the bug report and give you a brief idea. Please let me know if this is helpful for you.
I found the code causing the program to crash. In the class ActivationService I commented out the line relating to the theme selector service. After some testing I have not seen any negative effects. (Switching between light and dark themes still works.)
private async Task InitializeAsync()
{
//await _themeSelectorService.InitializeAsync().ConfigureAwait(false);
await Task.CompletedTask;
}
To answer my own question: I managed this with one queue in the end, just filtering the S3 files with the glob filter.
Try adding the initializer as a key in the remember call as well:
var name by remember(inventoryItem.name ?: "") { mutableStateOf(inventoryItem.name ?: "") }
The free plan limits are very low, and you probably need to upgrade to a paid plan. You can do it on the API Keys page of Google AI Studio.
Try disabling the VerifyTests diff runner.
DiffRunner.Disabled = true;
And in case anyone still can't access their enums even though the schema has been generated: it's because Prisma won't generate enum types if you don't use them in any table columns ("for GraphQL reasons").
How did I solve it? I ran prisma generate, and I'm good to go.
I am using these:
filter: drop-shadow(100vw 0 0 #5e8141);
transform: translateX(-100vw);
In your first example, you're assigning the element's background color to the variable x, but modifying x doesn't affect the element because it simply reassigns x without updating the actual style of the page.
This is a loaded question! Start by fixing the isEdit logic using the NovaRequest to properly detect whether you're in update or detail mode:
$isEdit = $request->isUpdateOrUpdateAttachedRequest() || $request->isResourceDetailRequest();
Zscaler provides documentation here on adding custom certificates for some applications (such as cURL) and languages.
datetime.timedelta is useful for this type of thing:
import dateutil
import pytz
from datetime import timedelta
utc = pytz.UTC
print(utc.localize(dateutil.parser.parse('2024-10-31'))+timedelta(days=1)-timedelta(seconds=1))
In distributed mode the individual JMeter workers send the samples to the JMeter master/controller. Any backend listener configured on the JMeter workers is ignored. The backend listener must be configured on the JMeter master in order for you to send metrics to InfluxDB. This means you must configure the InfluxDB backend listener in the test plan itself BEFORE you execute your test.
Since Django 5.1, this is automatically handled by the querystring
template tag. Documentation:
https://docs.djangoproject.com/en/5.1/ref/templates/builtins/#querystring
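For example, in a paginated list template (page_obj is the standard pagination context object; the tag preserves the other query parameters):
<a href="{% querystring page=page_obj.next_page_number %}">Next page</a>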
If your database supports the LEAST and GREATEST functions, you can consolidate rows into unique pairs before calculating the count.
SELECT
LEAST("from", "to") AS user1,
GREATEST("from", "to") AS user2,
COUNT(*) AS count
FROM
messages
GROUP BY
LEAST("from", "to"),
GREATEST("from", "to");
I found an answer thanks to the hint from @acw1668.
I adjusted the <Configure> command to check the size of the scrollregion and compare it to the parent ScrollableFrame class window. If it is smaller, I change the scroll region to be the size of the ScrollableFrame.
Here are the adjustments I made to my class, including changing the window anchor to sit at the top left of the frame:
class ScrollableFrame(ttk.Frame):
"""
"""
def __init__(self, parent):
"""
"""
...
self.scrollableFrame.bind("<Configure>", self.update_scroll_region)
...
self.canvas.create_window((0, 0), window=self.scrollableFrame, anchor='nw')
def update_scroll_region(self, event):
bbox = self.canvas.bbox('all')
sfHeight = self.winfo_height()
if (bbox[3] - bbox[1]) < sfHeight:
newBbox = (bbox[0], 0, bbox[2], sfHeight)
self.canvas.configure(scrollregion=newBbox)
else:
self.canvas.configure(scrollregion=self.canvas.bbox("all"))