Page 53«..1020..52535455..6070..»

The Machine Learning Guide for Predictive Accuracy: Interpolation and Extrapolation – Towards Data Science

class ModelFitterAndVisualizer:
    """Fit a collection of regression models and visualize each model's
    prediction surface alongside the ground truth and training points.

    NOTE(review): relies on module-level imports of numpy (np), plotly
    (go, make_subplots), scikit-learn estimators (RBF, WhiteKernel, SVR,
    GaussianProcessRegressor, RandomForestRegressor, DecisionTreeRegressor,
    VotingRegressor, StackingRegressor, MinMaxScaler), gplearn's
    SymbolicRegressor, xgboost (xgb), and lightgbm (lgbm) — confirm these
    exist at the top of the file.
    """

    def __init__(self, X_train, y_train, y_truth, scaling=False, random_state=41):
        """Initialize the ModelFitterAndVisualizer with training and truth data.

        Parameters:
            X_train (pd.DataFrame): Training data features (two columns)
            y_train (pd.Series): Training data target
            y_truth (np.ndarray): Ground truth evaluated on the plotting meshgrid
            scaling (bool): Flag to indicate if MinMax scaling should be applied
            random_state (int): Seed for random number generation
        """
        self.X_train = X_train
        self.y_train = y_train
        self.y_truth = y_truth

        self.initialize_models(random_state)

        self.scaling = scaling

    # Initialize models
    # -----------------------------------------------------------------
    def initialize_models(self, random_state):
        """Build self.models (estimators to fit) and self.titles (panel titles).

        Parameters:
            random_state (int): Seed for random number generation
        """
        # Define kernel for GPR
        kernel = 1.0 * RBF(length_scale=1.0) + WhiteKernel(noise_level=1.0)

        # Define ensemble model estimators
        # Decision tree + kernel method
        estimators_rf_svr = [
            ('rf', RandomForestRegressor(n_estimators=30, random_state=random_state)),
            ('svr', SVR(kernel='rbf')),
        ]
        estimators_rf_gpr = [
            ('rf', RandomForestRegressor(n_estimators=30, random_state=random_state)),
            ('gpr', GaussianProcessRegressor(kernel=kernel, normalize_y=True, random_state=random_state)),
        ]
        # Decision trees only
        estimators_rf_xgb = [
            ('rf', RandomForestRegressor(n_estimators=30, random_state=random_state)),
            ('xgb', xgb.XGBRegressor(random_state=random_state)),
        ]

        self.models = [
            SymbolicRegressor(random_state=random_state),
            SVR(kernel='rbf'),
            GaussianProcessRegressor(kernel=kernel, normalize_y=True, random_state=random_state),
            DecisionTreeRegressor(random_state=random_state),
            RandomForestRegressor(random_state=random_state),
            xgb.XGBRegressor(random_state=random_state),
            lgbm.LGBMRegressor(n_estimators=50, num_leaves=10, min_child_samples=3, random_state=random_state),
            VotingRegressor(estimators=estimators_rf_svr),
            StackingRegressor(estimators=estimators_rf_svr, final_estimator=RandomForestRegressor(random_state=random_state)),
            VotingRegressor(estimators=estimators_rf_gpr),
            StackingRegressor(estimators=estimators_rf_gpr, final_estimator=RandomForestRegressor(random_state=random_state)),
            VotingRegressor(estimators=estimators_rf_xgb),
            StackingRegressor(estimators=estimators_rf_xgb, final_estimator=RandomForestRegressor(random_state=random_state)),
        ]

        # Graph titles: two leading panels (ground truth, training points)
        # followed by one panel per model, in self.models order.
        self.titles = [
            "Ground Truth", "Training Points",
            "SymbolicRegressor",
            "SVR",
            "GPR",
            "DecisionTree",
            "RForest",
            "XGBoost",
            "LGBM",
            "Vote_rf_svr",
            "Stack_rf_svr__rf",
            "Vote_rf_gpr",
            "Stack_rf_gpr__rf",
            "Vote_rf_xgb",
            "Stack_rf_xgb__rf",
        ]

    def fit_models(self):
        """Fit every model to the (optionally MinMax-scaled) training data.

        Returns:
            self: Instance of the class with fitted models
        """
        if self.scaling:
            scaler_X = MinMaxScaler()
            self.X_train_scaled = scaler_X.fit_transform(self.X_train)
        else:
            self.X_train_scaled = self.X_train.copy()

        for model in self.models:
            model.fit(self.X_train_scaled, self.y_train)
        return self

    def visualize_surface(self, x0, x1, width=400, height=500,
                          num_panel_columns=5,
                          vertical_spacing=0.06, horizontal_spacing=0,
                          output=None, display=False, return_fig=False):
        """Visualize the prediction surface of each model in a subplot grid.

        Parameters:
            x0 (np.ndarray): Meshgrid for feature 1
            x1 (np.ndarray): Meshgrid for feature 2
            width (int): Width of each panel
            height (int): Height of each panel
            num_panel_columns (int): Number of subplot columns
            vertical_spacing (float): Vertical spacing between panels
            horizontal_spacing (float): Horizontal spacing between panels
            output (str): File path to save the plot as HTML
            display (bool): Flag to display the plot
            return_fig (bool): If True, return the plotly figure

        Returns:
            plotly Figure when return_fig is True, otherwise None.
        """
        num_plots = len(self.models) + 2
        # BUGFIX: use ceiling division — floor division dropped the final
        # (partial) row whenever num_plots was not a multiple of
        # num_panel_columns, making add_trace fail on an out-of-range row.
        num_panel_rows = -(-num_plots // num_panel_columns)

        whole_width = width * num_panel_columns
        whole_height = height * num_panel_rows

        specs = [[{'type': 'surface'} for _ in range(num_panel_columns)]
                 for _ in range(num_panel_rows)]
        fig = make_subplots(rows=num_panel_rows, cols=num_panel_columns,
                            specs=specs, subplot_titles=self.titles,
                            vertical_spacing=vertical_spacing,
                            horizontal_spacing=horizontal_spacing)

        # BUGFIX: __init__ documents X_train as a pd.DataFrame, which does
        # not support positional [:, 0] indexing — convert once up front so
        # both DataFrames and ndarrays work.
        X_train_arr = np.asarray(self.X_train)

        # NOTE(review): when self.scaling is True the models were fit on
        # scaled features, but the meshgrid here is not scaled — confirm
        # this method is only used with scaling=False, or scale the grid.
        for i, model in enumerate([None, None] + self.models):
            # Assign the subplot panel (1-based row/col)
            row = i // num_panel_columns + 1
            col = i % num_panel_columns + 1

            if i == 1:
                # Panel 2: training points overlaid on a translucent
                # ground-truth surface.
                fig.add_trace(go.Scatter3d(x=X_train_arr[:, 0],
                                           y=X_train_arr[:, 1],
                                           z=self.y_train,
                                           mode='markers',
                                           marker=dict(size=2, color='darkslategray'),
                                           name='Training Data'),
                              row=row, col=col)

                surface = go.Surface(z=self.y_truth, x=x0, y=x1,
                                     showscale=False, opacity=.4)
                fig.add_trace(surface, row=row, col=col)
            else:
                # Panel 1 (model is None): ground truth; remaining panels:
                # each model's predictions on the meshgrid.
                y_pred = (self.y_truth if model is None
                          else model.predict(np.c_[x0.ravel(), x1.ravel()]).reshape(x0.shape))
                surface = go.Surface(z=y_pred, x=x0, y=x1, showscale=False)
                fig.add_trace(surface, row=row, col=col)

            fig.update_scenes(dict(
                xaxis_title='x0',
                yaxis_title='x1',
                zaxis_title='y',
            ), row=row, col=col)

        fig.update_layout(title='Model Predictions and Ground Truth',
                          width=whole_width, height=whole_height)

        # Change the camera angle of every 3D scene
        camera = dict(
            up=dict(x=0, y=0, z=1),
            center=dict(x=0, y=0, z=0),
            eye=dict(x=-1.25, y=-1.25, z=2)
        )
        for i in range(num_plots):
            fig.update_layout(**{f'scene{i+1}_camera': camera})

        if display:
            fig.show()

        if output:
            fig.write_html(output)

        if return_fig:
            return fig

More here:

The Machine Learning Guide for Predictive Accuracy: Interpolation and Extrapolation - Towards Data Science

Read More..

60 Power BI Interview Questions and Expert Answers for 2024 – Simplilearn

Back in 2011, the rise of Business Intelligence tools posed a challenge to Microsoft to build its own business intelligence tool. Microsoft introduced the Power BI to deliver compelling analytical capabilities to existing Microsoft Excel and upgrade it to be intelligent enough to generate interactive reports.

According to Gartner's Magic Quadrant, Microsoft Power BI is one of today's top business intelligence tools, chiefly because most IT firms rely on Power BI for their business analytics. As a result, the current IT industry finds a massive demand for Power BI experts.

This tutorial is solely dedicated to helping aspiring Power BI professionals grasp the essential fundamentals of Power BI and crack the interviews in real-time. The tutorial is organized based on three categories, outlined below.

We have five dozen questions for you, so let's begin by going through some refresher-level or frequently asked beginner-level Power BI interview questions.

Power BI is a business analytics tool developed by Microsoft that helps you turn multiple unrelated data sources into valuable and interactive insights. These data may be in the form of an Excel spreadsheet or cloud-based/on-premises hybrid data warehouses. You can easily connect to all your data sources and share the insights with anyone.

Because Power BI provides an easy way for anyone, including non-technical people, to connect, change, and visualize their raw business data from many different sources and turn it into valuable data that makes it easy to make smart business decisions.

Both Tableau and Power BI are the current IT industry's data analytics and visualization giants. Yet, there are a few significant differences between them. You will now explore the important differences between Tableau and Power BI.

Tableau uses MDX for measures and dimensions

Power BI uses DAX for calculating measures

Tableau is capable of handling large volumes of data

Power BI is qualified only to handle a limited amount of data

Tableau is best suitable for experts

Power BI is suitable for both experts and beginners

Tableau User Interface is complicated

Power BI User Interface is comparatively simpler

Tableau is capable of supporting the cloud with ease.

Power BI finds it difficult, as its capacity to handle large volumes of data is limited.

The differences between Power Query and Power Pivot are explained as follows:

Power Query is all about getting and transforming data.

Power Pivot is all about analyzing data.

Power Query is an ETL service tool.

Power Pivot is an in-memory data modeling component

Power BI Desktop is a free application designed and developed by Microsoft. Power BI Desktop allows users to connect to, transform, and visualize their data with ease. Power BI Desktop lets users build visuals and collections of visuals that can be shared as reports with colleagues or clients in their organization.

Power Pivot is an add-on provided by Microsoft for Excel since 2010. Power Pivot was designed to extend the analytical capabilities and services of Microsoft Excel.

Power Query is a business intelligence tool designed by Microsoft for Excel. Power Query allows you to import data from various data sources and will enable you to clean, transform and reshape your data as per the requirements. Power Query allows you to write your query once and then run it with a simple refresh.

Self-service business intelligence (SSBI) is divided into the Excel BI Toolkit and Power BI.

SSBI is an abbreviation for Self-Service Business Intelligence and is a breakthrough in business intelligence. SSBI has enabled many business professionals with no technical or coding background to use Power BI and generate reports and draw predictions successfully. Even non-technical users can create these dashboards to help their business make more informed decisions.

DAX stands for Data Analysis Expressions. It's a collection of functions, operators, and constants used in formulas to calculate and return values. In other words, it helps you create new info from data you already have.

The term "Filter" is self-explanatory. Filters are mathematical and logical conditions applied to data to filter out essential information in rows and columns. The following are the variety of filters available in Power BI:

Custom Visuals are like any other visualizations generated using Power BI. The only difference is that custom visuals are developed using a custom SDK. Languages such as jQuery and JavaScript are used to create custom visuals in Power BI.

Get Data is a simple icon on Power BI used to import data from the source.

Some of the advantages of using Power BI:

Here are some limitations to using Power BI:

Power Pivot for Excel supports only single directional relationships (one to many), calculated columns, and one import mode. Power BI Desktop supports bi-directional cross-filtering connections, security, calculated tables, and multiple import options.

There are three main connectivity modes used in Power BI.

An SQL Server Import is the default and most common connectivity type used in Power BI. It allows you to use the full capabilities of the Power BI Desktop.

The Direct Query connection type is only available when you connect to specific data sources. In this connectivity type, Power BI will only store the metadata of the underlying data and not the actual data.

With this connectivity type, it does not store data in the Power BI model. All interaction with a report using a Live Connection will directly query the existing Analysis Services model. There are only 3 data sources that support the live connection method - SQL Server Analysis Services (Tabular models and Multidimensional Cubes), Azure Analysis Services (Tabular Models), and Power BI Datasets hosted in the Power BI Service.

Four important types of refresh options provided in Microsoft Power BI are as follows:

Several data sources can be connected to Power BI, which is grouped into three main types:

It can import data from Excel (.xlsx, .xlxm), Power BI Desktop files (.pbix) and Comma-Separated Values (.csv).

These are a collection of related documents or files stored as a group. There are two types of content packs in Power BI:

Connectors help you connect your databases and datasets with apps, services, and data in the cloud.

A dashboard is a single-layer presentation sheet of multiple visualizations reports. The main features of the Power BI dashboard are:

Relationships between tables are defined in two ways:

No. There can be multiple inactive relationships, but only one active relationship between two tables in a Power Pivot data model. Dotted lines represent inactive relationships, and continuous lines represent active relationships.

Yes. There are two main reasons why you can have disconnected tables:

The CALCULATE function evaluates the sum of the Sales table Sales Amount column in a modified filter context. It is also the only function that allows users to modify the filter context of measures or tables.

Moving ahead, you will step up to the following Power BI Interview Questions from the Intermediate Level.

Most of the time, Power BI is assisted by the cloud to store the data, though Power BI can also be used as a desktop service. Microsoft Azure is used as the primary cloud service to store the data.

Row-level security limits the data a user can view and has access to, and it relies on filters. Users can define the rules and roles in Power BI Desktop and also publish them to Power BI Service to configure row-level security.

Users can use general formatting to make it easier for Power BI to categorize and identify data, making it considerably easier to work with.

There are three different views in Power BI, each of which serves another purpose:

Report View - In this view, users can add visualizations and additional report pages and publish the same on the portal.

Data View - In this view, data shaping can be performed using Query Editor tools.

Model View - In this view, users can manage relationships between complex datasets.

The important building blocks of Power BI are as follows:

Visualization is the process of generating charts and graphs for the representation of insights on business data.

A dataset is the collection of data used to create a visualization, such as a column of sales figures. Dataset can get combined and filtered from a variety of sources via built-in data plugins.

The final stage is the report stage. Here, there is a group of visualizations on one or more pages. For example, charts and maps are combined to make a final report.

A Power BI dashboard helps you to share a single visualization with colleagues and clients to view your final dashboard.

A tile is an individual visualization on your final dashboard or one of your charts in your final report.

The critical components of Power BI are mentioned below.

A content pack is defined as a ready-made collection of visualizations and Power BI reports using your chosen service. You'd use a content pack when you want to get up and running quickly instead of creating a report from scratch.

Bidirectional cross-filtering lets data modelers decide how they want their Power BI Desktop filters to flow for data, using the relationships between tables. The filter context is transmitted to a second related table that exists on the other side of any given table relationship. This procedure helps data modelers solve the many-to-many issue without having to write complicated DAX formulas. So, to sum it up, bidirectional cross-filtering makes the job of data modelers easier.

This is how the formula is written — that is, the elements that comprise it. The syntax includes functions such as SUM (used when you want to add figures). If the syntax isn't correct, you'll get an error message.

These are formulas that use specific values (also known as arguments) in a particular order to perform a calculation, similar to the functions in Excel. The categories of functions are date/time, time intelligence, information, logical, mathematical, statistical, text, parent/child, and others.

There are two types: row context and filter context. Row context comes into play whenever a formula has a function that applies filters to identify a single row in a table. When one or more filters are applied in a calculation that determines a result or value, the filter context comes into play.

You will use a custom visual file if the prepackaged files don't fit the needs of your business. Developers create custom visual files, and you can import them and use them in the same way as you would the prepackaged files.

A few familiar data sources are Excel, Power BI datasets, web, text, SQL server, and analysis services.

Power BI Desktop helps you to group the data in your visuals into chunks. You can, however, define your groups and bins. For grouping, use Ctrl + click to select multiple elements in the visual. Right-click one of those elements and, from the menu that appears, choose Group. In the Groups window, you can create new groups or modify existing ones.

On a Power BI final report page, a developer can resize a responsive slicer to various sizes and shapes, and the data collected in the container will be rearranged to find a match. If a visual report becomes too small to be useful, an icon representing the visual takes its place, saving space on the report page.

Query folding is used when steps defined in the Query Editor are translated into SQL and executed by the source database instead of your device. It helps with scalability and efficient processing.

M is a programming language used in Power Query as a functional, case-sensitive language similar to other programming languages and easy to use.

Visual-level filters are used to filter data within a single visualization. Page-level filters are used to work on an entire page in a report, and different pages can have various filters.

Report-level filters are used to filter all the visualizations and pages in the report.

Users can set up for an automatic refresh over data based on daily or weekly requirements. Users can schedule only one refresh maximum daily unless they have Power BI Pro. The Schedule Refresh section uses the pull-down menu choices to select a frequency, time zone, and time of day.

Power Map can display geographical visualizations. Therefore, some location data is needed — for example, city, state, country, or latitude and longitude.

Power Pivot uses the xVelocity engine. xVelocity can handle huge amounts of data, storing data in columnar databases. All data gets loaded into RAM memory when you use in-memory analytics, which boosts the processing speed.

Following are some of the important Components of SSAS:

An OLAP Engine is used to extensively run the ADHOC queries at a faster pace by the end-users

It describes data Drilling in SSAS as the process of exploring details of the data with multiple levels of granularity.

The data Slicing process in SSAS is defined as the process of storing the data in rows and columns.

Pivot Tables helps in switching between the different categories of data stored between rows and columns

Power BI is available mainly in three formats, as mentioned below.

There are three different stages in working on Power BI, as explained below.

The primary step in any business intelligence is to establish a successful connection with the data source and integrate it to extract data for processing.

The next step in business intelligence is data processing. Most of the time, the raw data also includes unexpected erroneous data, or sometimes a few data cells might be empty. The BI tool needs to interpret the missing values and inaccurate data for processing in the data processing stage.

The final stage in business intelligence is analyzing the data obtained from the source and presenting the insights using visually appealing graphs and interactive dashboards.

Beginners and experts prefer Power BI in business intelligence. Power BI is used mainly by the following professionals.

A business analyst is a professional who analyses the business data and represents the insights found using visually appealing graphs and dashboards

Business owners, decision-makers, or organizations use Power BI to view the insights and understand the prediction to make a business decision.

Business Developers are just software developers who get hired for business purposes to develop custom applications and dashboards to help the business process be smooth.

Advanced editor is used to view queries that Power BI is running against the data sources importing data. The query is rendered in M-code. Users wanting to view the query code select Edit Queries from the Home tab, then click on Advanced Editor to perform work on the query. Any changes get saved to Applied Steps in the Query Settings.

Gateways function as bridges between the in-house data sources and Azure Cloud Services.

There are multiple applications of Power BI; some of them are as follows:

Every individual chart or visualization report generated is collected and represented on a single screen. Such an approach is called a Power BI Dashboard. A Dashboard in Power BI is used to depict a story.

KPI is an abbreviation for Key Performance Indicator. In any professional organization, teams and employees follow KPI protocols. The organizations set up KPIs for all the employees, and these KPIs act as their targets. KPIs are compared against previous performance to analyze progress.

Slicers are an integral part of a business report generated using Power BI. The functionality of a slicer can be considered similar to that of a filter, but, unlike a filter, a Slicer can display a visual representation of all values and users will be provided with the option to select from the available values in the slicers drop-down menu.

It is a combined solution offered to upload the reports and dashboards to the PowerBI.com website for reference. It consists of Power Pivot, Power Query, and Power Table.

Read this article:

60 Power BI Interview Questions and Expert Answers for 2024 - Simplilearn

Read More..

Register to host an event at Qiskit Fall Fest 2024! – IBM

Key dates for prospective event hosts:

August 7: Deadline to sign up for event host informational sessions and Qiskit Fall Fest mailing list

August 15: Informational session

August 16: Informational session

August 22: Deadline for event host applications

August 27: Application decisions to be announced

September 3: Qiskit Fall Fest 2024 event lineup to be announced to the public

October-November: Qiskit Fall Fest events take place

Since 2021, the Qiskit Fall Fest has brought together quantum enthusiasts of all backgrounds and experience levels for a worldwide celebration of quantum technology, research, and collaboration. Spearheaded primarily by student leaders and taking place on university campuses all around the globe, Qiskit Fall Fest gives participants a unique opportunity to engage with the Qiskit community and even get hands-on experience with real quantum computers. Now, the event series is gearing up to return for its fourth annual installment, which will kick off in October.

Qiskit Fall Fest is a collection of quantum computing events that invites students, researchers and industry professionals around the world to participate in a wide array of quantum-themed activities, ranging from quantum challenges, hackathons, and coding competitions to workshops, social events, and more. With each Qiskit Fall Fest, we partner with a select group of university students and other volunteer hosts to help them plan and run the global roster of Fall Fest events. This year's event theme, World of Quantum, celebrates the international scope of the event series and the rapid growth of the global quantum community.

Last year's Qiskit Fall Fest engaged over 4,000 participants with the help of 95 event hosts, all working alongside IBM Quantum to grow their local quantum communities. We hope to see even more participants in 2024!

We're looking for volunteers located all around the world to host their very own events as part of the Qiskit Fall Fest lineup. Anyone who has a passion for quantum computing is eligible to host a Fall Fest event. (See the next section of this post for more details on host eligibility.)

Interested in joining the fun? Click this link to register for one of the Qiskit Fall Fest informational sessions we'll be holding this summer for prospective event hosts.

The informational sessions will take place on Thursday, August 15 and Friday, August 16, and will give prospective event hosts valuable insights into the requirements and time commitment involved with running a Qiskit Fall Fest event.

If you'd like to participate in Qiskit Fall Fest but don't plan on hosting an event, you can also use the same registration link to sign up for the Qiskit Fall Fest mailing list, which will keep you up-to-date with all the latest details on this year's events.

Please submit all registrations for the Qiskit Fall Fest informational sessions and/or mailing list by Wednesday, August 7.

After the informational sessions, prospective event hosts will submit applications detailing their background and expertise in quantum computing. Applications will be due the week after the information sessions, and decisions will be announced the week after that. Be sure to check the sidebar at the top of this page for all key dates.

The full roster of Qiskit Fall Fest 2024 events will be announced to the public in early September, and the events themselves will take place in October and November.

Most Qiskit Fall Fest events take place on university campuses and are led by university students — though there are certainly some exceptions. We've intentionally put students at the forefront of the Qiskit Fall Fest event series since its initial launch in 2021. That's because we believe the student leaders of today will be the quantum industry leaders of tomorrow. With the Qiskit Fall Fest, we aim to give students an opportunity to put their leadership skills to the test and help grow the quantum community using resources and guidance from IBM.

At the same time, anyone can participate in — and even host — a Qiskit Fall Fest event. Don't have access to a university campus? No problem! In the past, we've had high school students, recent graduates, and even industry professionals host events that take place virtually and in other appropriate settings. Just be sure to register for the informational sessions by August 7 and submit your idea for an event by August 22. If it's a fit, we'll work with you to bring it to life. (Please note: Only those who attend one of the informational sessions will receive access to the event host application.)

Click here to register for the mailing list and informational sessions.

See the original post:
Register to host an event at Qiskit Fall Fest 2024! - IBM

Read More..

Scientists invent tiny device that creates ice-cold electricity for quantum computers – Study Finds

The LANES lab's 2D device made of graphene and indium selenide ( Alain Herzog)

LAUSANNE, Switzerland Scientists have created a miniature 2D device that can convert heat into electricity with record-breaking efficiency at temperatures lower than in outer space! This breakthrough could revolutionize how we power sensitive quantum computers and explore exotic physics in extremely cold environments.

In the journal Nature Nanotechnology, a team of researchers from Switzerland and Japan revealed their electrically-tunable Nernst effect device made from atomically-thin layers of different materials stacked together. Their tiny chip, measuring just micrometers across, can generate useful electrical signals from small temperature differences even at a frigid 100 millikelvin — just a fraction of a degree above absolute zero.

The device takes advantage of the Nernst effect, where a voltage is generated perpendicular to both a temperature gradient and magnetic field in certain materials. While this effect has been known for over a century, making it work well in extreme cold has been an ongoing challenge until now.

"We are the first to create a device that matches the conversion efficiency of current technologies, but that operates at the low magnetic fields and ultra-low temperatures required for quantum systems. This work is truly a step ahead," says Gabriele Pasquale, a PhD student at EPFL's Laboratory of Nanoscale Electronics and Structures (LANES), in a media release.

The key to the team's success was carefully combining different two-dimensional materials into a van der Waals heterostructure — essentially a stack of ultra-thin layers held together by weak atomic forces.

They started with a base layer of graphene — a single-atom-thick sheet of carbon with excellent electrical properties. On top of this, they placed a few layers of indium selenide (InSe), a semiconductor with intriguing thermoelectric characteristics. The whole stack was then encapsulated in insulating layers of hexagonal boron nitride for protection.

The researchers fabricated their devices using advanced clean-room techniques to ensure the highest quality and purity of materials. They then cooled the chips down to just above absolute zero in a special refrigerator called a dilution fridge.

To test the devices, the team used a focused laser to create localized heating and sophisticated electronic measurements to detect the resulting signals. They also applied magnetic fields and varied the electrical charge in the device using additional electrodes.

The team observed a Nernst effect signal that could be switched on and off electrically with an unprecedented ratio of 1,000 to 1. This means the device can be precisely controlled using standard electronic components.

Even more impressively, they measured a Nernst coefficient — a measure of the strength of the effect — of 66.4 microvolts per kelvin per tesla. This is the highest value ever reported at such low temperatures and modest magnetic fields.

The researchers also found that their heterostructure design amplified the Nernst effect compared to using graphene or indium selenide alone. This synergistic enhancement points to new ways of engineering improved thermoelectric materials.

If you think of a laptop in a cold office, the laptop will still heat up as it operates, causing the temperature of the room to increase as well. In quantum computing systems, there is currently no mechanism to prevent this heat from disturbing the qubits. Our device could provide this necessary cooling, Pasquale explains.

This breakthrough has significant implications for both fundamental physics and practical applications. On the basic science side, it provides a new tool for probing exotic quantum states of matter that only emerge at ultra-low temperatures.

On the applied side, the technology could find use in quantum computing, where precise control of heat flow is critical. It might enable new types of quantum sensors or help manage waste heat in superconducting circuits.

The team is now working to further optimize their devices and explore different material combinations. They're also investigating how to scale up production for practical applications.

These findings represent a major advancement in nanotechnology and hold promise for developing advanced cooling technologies essential for quantum computing at millikelvin temperatures, Pasquale concludes. We believe this achievement could revolutionize cooling systems for future technologies.

See the original post:
Scientists invent tiny device that creates ice-cold electricity for quantum computers - Study Finds

Read More..

The 3 Best Quantum Computing Stocks to Buy in July 2024 – InvestorPlace

Analysts state that the financial services sector is expected to see significant benefits from quantum computing through enhanced portfolio optimization and fraud detection capabilities. Throughout all of this, the healthcare industry is anticipated to leverage quantum computing for drug discovery and personalized medicine. As the technology evolves, we will see a transition from physical qubits to more stable, error-corrected logical qubits. This will enhance the reliability of quantum computations. So now could be a great time for investors to consider these quantum computing stocks to buy.

Here are three companies to consider.

Source: Amin Van / Shutterstock.com

IonQ (NYSE:IONQ) is a leading developer of quantum computing systems and software. The company uses trapped ion technology to create quantum computers with high-fidelity qubits and low error rates.

There are a few reasons I'm bullish on IONQ. The average analyst rating for IonQ is a Buy. The consensus price target is $16.50. Furthermore, this represents a potential upside of 141.23% from the current stock price.

Also, analysts expect IonQ's revenue to grow at a rapid pace, with a 5-year revenue growth forecast of 95.40%. If the company can maintain this growth trajectory and improve its profitability, the stock's valuation may become more attractive over time.

IonQ also has a strong balance sheet with a net cash position of $359.72 million, or $1.70 per share. Moreover, this provides the company with financial flexibility to invest in growth opportunities and weather potential challenges. Also, this is hedging against short-term fears of shareholder dilution through the issuance of new shares.

Source: Bartlomiej K. Wroblewski / Shutterstock.com

D-Wave Quantum (NYSE:QBTS) is a leader in quantum computing systems, software, and services. People know this company for its quantum annealing technology.

I think that QBTS could be one of the frontrunners in the quantum computing arms race for a few reasons.

QBTS launched the fast-anneal feature. This is now available on all of its quantum processing units (QPUs) in the Leap real-time quantum cloud service. Furthermore, this feature allows users to perform quantum computations at unprecedented speeds. As a result, this will significantly reduce the impact of external disturbances such as thermal fluctuations and noise.

In the press release, QBTS states that the fast-anneal feature has generated significant interest from commercial and academic researchers who are eager to leverage its capabilities for building world-class applications, expanding benchmarking studies, and exploring the potential benefits of increased coherence in various industrial applications.

In terms of financial forecasts, D-Wave Quantum's revenue may grow by 46.02% this year to $12.79 million, and by 101.46% next year to $25.76 million. While the company is still expected to report losses in the coming years, the magnitude of those losses is projected to decrease.

Source: Boykov / Shutterstock.com

Rigetti Computing (NASDAQ:RGTI) is a pioneer in quantum computing, offering full-stack quantum-classical computing services. The company provides its services through Rigetti Quantum Cloud Services, serving global enterprise, government, and research clients.

The average analyst rating for RGTI is strong buy, with a consensus price target of $3.17, representing a potential upside of 217.41% from the current stock price of around $1 at the time of writing.

Also, RGTIs revenue is expected to grow by 29.96% this year to $15.61 million and by 88.86% next year to $29.47 million. The companys 5-year revenue growth forecast is 70.72%, indicating a strong growth trajectory.

This optimism is reflected in RGTI's valuation: RGTI has a high price-to-sales (PS) ratio of 13 times sales and a forward PS ratio of 10, so the market is pricing in some very strong expectations for the company. This company's valuation, though, is slightly on the smaller side compared with its peers in the industry, which could mean it may be an undervalued opportunity also.

On the date of publication, the responsible editor did not have (either directly or indirectly) any positions in the securities mentioned in this article.

On the date of publication, Matthew Farley did not have (either directly or indirectly) any positions in the securities mentioned in this article. The opinions expressed are those of the writer, subject to the InvestorPlace.com Publishing Guidelines.

Matthew started writing coverage of the financial markets during the crypto boom of 2017 and was also a team member of several fintech startups. He then started writing about Australian and U.S. equities for various publications. His work has appeared in MarketBeat, FXStreet, Cryptoslate, Seeking Alpha, and the New Scientist magazine, among others.

Follow this link:
The 3 Best Quantum Computing Stocks to Buy in July 2024 - InvestorPlace

Read More..

Wasiq Bokhari Joins Pasqal to Revolutionize Quantum Computing with Neutral Atom Technology & Unmatched Team Collaboration – The Quantum Insider

Why did Wasiq Bokhari, Chairperson at Pasqal, join the pioneering neutral atom quantum computing company?

In his own words in a recent interview: Three principal reasons: first, I am just very interested in the whole quantum computing space. It is an amazing threshold of capability that we are about to cross, and I just want to be part of this.

Bokhari's enthusiasm for quantum computing's transformative potential is palpable, but he also cites the caliber of Pasqal's team as a driving factor.

Second, it's the team, it's the people — brilliant, motivated, really good people, he said. One of the things I've learned is always work with exceptional people, so I want to be part of the team.

However, it is Pasqal's unique approach that sets it apart in Bokhari's eyes.

The third is the approach, and on the approach, there are two things I would like to point out, Bokhari went on. First is the neutral atom approach itself; it is very scalable and has a lot of built-in advantages that other approaches do not have. But the second is the overall engineering approach that the team has taken from the very beginning.

This engineering mindset is a cornerstone of Pasqal's ethos, as Bokhari elaborated: It has never been about only demonstrating something in a lab but how can we build systems that can be deployed in the real world and can solve real-world problems today versus waiting for a long period of time? How can we engage with customers?

Bokhari sees quantum computing as a transformative force.

The advent of scalable quantum computing is a fundamentally disruptive and redefining moment for us, he said, before adding that Quantum computing, enabled by us, will become a pillar of all computation along with traditional high-performance computing and all sorts of generative or non-generative AI methods. It becomes a third pillar.

At the heart of Pasqal's approach is a relentless pursuit of solving complex problems.

We are super mindful about solving the hard fundamental problems that give an edge to quantum computing; we don't shy away from the hard problems, we run towards them because that's the fundamental edge that you get through the use of quantum computing, said Bokhari.

Pasqal's commitment to real-world impact is unwavering, as Bokhari affirmed: The second thing that's important to us is to build systems that are not one-offs but are scalable. This requires very good, in fact, excellent engineering discipline and thinking about scalability, thinking about repeatability, thinking about yields from the very upfront in terms of the design, from every component all the way up to the systems.

Engaging with customers and solving their critical challenges is paramount to Pasqal's mission.

We should not shy away from engaging with customers and engaging with their real business-critical problems because only by getting in direct contact with what matters and what delivers value can we focus our energies on delivering something that is valuable, said Bokhari.

Under Bokhari's leadership, Pasqal is forging a collaborative path with its customers: Today, we view this journey as a journey together with our customers. It's a collaborative journey. We would like our customers to challenge us with hard problems, with problems that matter to them.

With its groundbreaking neutral atom technology, exceptional team, and unwavering customer-centric approach, Pasqal is poised to shape the future of quantum computing under Bokhari's visionary guidance.

Go here to read the rest:
Wasiq Bokhari Joins Pasqal to Revolutionize Quantum Computing with Neutral Atom Technology & Unmatched Team Collaboration - The Quantum Insider

Read More..

AI, quantum computing and tokenisation set to transform finance Menon – Central Banking

End of drawer navigation content Skip to main content But significant barriers remain preventing the technologies from unlocking their full potential

Ravi Menon

Asset tokenisation, artificial intelligence (AI) and quantum computing are the three breakthrough technologies that have the best prospects for transforming finance, according to Ravi Menon, former managing director of the Monetary Authority of Singapore (MAS).

Menon told an audience at the Point Zero conference in Switzerland on July 3 that he believed harnessing fintech would have deeply impactful benefits, so long as substantial risks associated with new technology are mitigated. He also

Please try again later. Get in touch with our customer services team if this issue persists.

New to Central Banking? View our subscription options

If you already have an account, please sign in here.

Risk.net, FX Markets.com, WatersTechnology.com, Central Banking.com, PostOnline.co.uk, InsuranceAge.co.uk, RiskTechForum.com and Chartis-Research.com.

Please use your existing password to sign in.

All fields are mandatory unless otherwise highlighted

Most read articles loading...

Back to Top

You need to sign in to use this feature. If you don't have a Central Banking account, please register for a trial.

You need to sign in to use this feature. If you don't have a Central Banking account, please register for a trial.

To use this feature you will need an individual account. If you have one already please sign in.

Alternatively you can request an individual account

Read more here:
AI, quantum computing and tokenisation set to transform finance Menon - Central Banking

Read More..

Universal Quantum CEO Sebastian Weidt Discusses the Future & Challenges of Quantum Computing – The Quantum Insider

Just made public by his company, Sebastian Weidt, CEO of Universal Quantum, provided valuable insights into the future of quantum computing at the Web Summit in Lisbon in November 2023. While acknowledging the current hype surrounding the technology, Weidt underlined the long-term potential and challenges facing the industry.

Weidt explained quantum computing as a new form of doing computations that utilizes strange quantum effects to solve problems exponentially faster than traditional supercomputers. However, he cautioned that significant scaling is required before quantum computers can deliver on their promise.

We really need to scale these machines from where we are at the moment — tens of qubits, hundreds of qubits — to millions of qubits, said Weidt. That's a scary target that we're aiming for here, but this is what ultimately must happen to unlock these applications.

Quantum error correction is, indeed, one of the fundamental problems with quantum computing. Weidt added that inherently, quantum systems were fragile and prone to errors. This is countered by developing error correction algorithms, which also need many physical qubits to create logical qubits of stability.

Regarding potential applications, Weidt expressed excitement about drug discovery: I think there's a lot of excitement for me personally as well around drug discovery. I think using these quantum computers to understand chemical reactions better, molecular structures better, which is at the heart of developing new drugs and currently is really hard using our currently available computing technology.

When asked about the timeline for practical quantum computers, Weidt was cautiously optimistic.

It would be nice to get some utility to something where you really feel a change because of quantum computing — maybe a new drug was developed because of that, maybe we understand climate change better, maybe a new material, he answered. There's a huge push to do that this decade, but I think this can definitely leak into the next decade as well.

Weidt also addressed concerns about quantum computers breaking current encryption systems. He urged businesses to prepare now: Please, please, please look at your encryption algorithms and check if they are quantum secure. Please make those changes now.

Looking to the future, Weidt sees a hybrid computing architecture where quantum and classical computers work together seamlessly. He punctuated that quantum computers won't replace classical systems but will complement them for specific problem-solving tasks.

As the quantum computing field continues to evolve, Weidt's insights provide a balanced perspective on both the challenges and immense potential of this groundbreaking technology.

Featured image: Credit: Web Summit

View original post here:
Universal Quantum CEO Sebastian Weidt Discusses the Future & Challenges of Quantum Computing - The Quantum Insider

Read More..

Copenhagen-based Kvantify secures 10 million to unlock quantum computing for the life science sector – EU-Startups

Kvantify, a leading quantum software start-up, has announced the successful closure of a 10 million seed round. This funding will enable Kvantify to strengthen its position as a global leader in quantum computing, with an initial focus on developing applications for the life science sector.

The seed round is led by Danish VC Dreamcraft, together with biotech investor Lundbeckfonden BioCapital and the private investment company 2degrees. Other notable investors include international sector-focused tech investor Redstone VC, Danish lead quantum VC 2xN as well as EIFO.

Lundbeckfonden BioCapital is a large Danish investor focused on local life science companies, supporting the translation and commercialization of ground-breaking science. This is Lundbeckfonden BioCapitals first investment outside the therapeutics space.

Hans Henrik Knudsen, CEO of Kvantify, commented: On behalf of the founding team, we are incredibly excited about the completion of our 10 million seed round, which marks a significant milestone for Kvantify. This funding not only validates our vision of leveraging quantum computing to revolutionize the life sciences industry but also provides us with the resources and strategic partnerships needed to accelerate our development and growth. With the support of new and existing investors, we are well-positioned to continue to bring groundbreaking solutions to market.

The investment will accelerate the development of Kvantify's innovative solutions that aim to leverage quantum computing to address complex problems in drug discovery and beyond. It will also boost the further development of quantum algorithms for chemical simulation, expanding their applicability across various industries.

With our investment in Kvantify, we are broadening our footprint in and commitment to further strengthening the Danish life science ecosystem. Quantum computing can deliver accuracy and derisking to the early stages of drug development to a level not possible with classical computers, thereby enabling faster speed to market. We are therefore excited about this opportunity and look forward to working with the Kvantify team to bridge quantum computing and drug development to the future benefit of patients, said Jacob Falck Hansen, Partner at Lundbeckfonden BioCapital.

Danish VC Dreamcraft invests in tech-driven companies, from pre-seed to series A, and has a proven track record with B2B SaaS software.

We're thrilled to partner with the team at Kvantify as they take a significant step forward in their mission to fulfill the promise of industrial applications of quantum computers. The potential of quantum chemical computational drug discovery is massive and represents a truly exciting beachhead market. We cannot wait to see how Kvantify will help solve today's seemingly impossible problems and serve as a crucial tool in designing the solutions of the future, added Carsten Salling, General Partner at Dreamcraft.

Redstone QAI Quantum Fund is a highly specialized venture capital fund that focuses on investing in groundbreaking technologies within the quantum technologies sector.

Kvantify's focus on applying quantum computing to life sciences and further industrial use cases across various sectors aligns with our strategic vision of advancing practical and impactful quantum solutions. With their interdisciplinary team, in-depth knowledge of quantum technology, and innovative approach to enhancing computational efficiency, Kvantify is perfectly placed to bring tremendous value to commercial markets, said Marco Stutz, Partner at Redstone.

Read more:
Copenhagen-based Kvantify secures 10 million to unlock quantum computing for the life science sector - EU-Startups

Read More..

The Urgent Need for Post-Quantum Cryptography in Critical Infrastructure | by Cyber Safe Institute | Jul, 2024 – Medium

The rapid evolution of quantum computing poses a significant threat to current cybersecurity practices, particularly for critical infrastructures (CI) such as power grids, transportation systems, and healthcare facilities. These systems rely heavily on secure communications and data integrity to ensure operational stability and public safety. The potential for quantum computers to break widely used cryptographic algorithms like RSA and ECC necessitates a proactive transition to post-quantum cryptography (PQC) [15]. This article examines the importance of cybersecurity in CI, the emergence of quantum computing as a threat, and the potential of PQC in safeguarding these vital systems against future attacks.

The 21st century has witnessed an unprecedented surge in interconnectedness, with communication technologies permeating nearly every aspect of modern society [6]. This hyperconnected paradigm extends beyond personal interactions to encompass the control of industrial machines, financial transactions, and the management of critical infrastructure [6].

Critical infrastructure, encompassing sectors like energy, transportation, and communication, forms the backbone of modern society. Cyber vulnerabilities in these systems can have catastrophic consequences, potentially disrupting essential services, causing economic damage, and even leading to loss of life [5, 7]. For instance, a cyberattack on a power grid could lead to widespread blackouts, crippling healthcare facilities, transportation systems, and emergency services [4, 7]. The interconnected nature of CI amplifies these risks, as a single point of failure can trigger a cascading effect, propagating disruptions across multiple sectors [7, 8].

Given the high stakes involved, cybersecurity has become paramount in protecting CI from increasingly sophisticated cyberattacks. In 2022 alone, there were an estimated 2,200 cyberattacks per day, highlighting the constant threat faced by businesses and infrastructure [7]. This threat is further exacerbated by growing geopolitical tensions, as state-sponsored actors increasingly leverage cyberattacks to disrupt critical infrastructure and sow discord [7].

Traditional cryptography, the foundation of secure communications, relies on mathematical problems that are computationally infeasible for classical computers to solve within a practical timeframe [9, 10]. Public-key cryptosystems, like RSA and ECC, underpin secure key exchange mechanisms and digital signatures, ensuring confidentiality, integrity, and authenticity in digital communications [8, 9, 11].

Quantum computers, leveraging the principles of quantum mechanics, possess the potential to dramatically outperform classical computers in solving specific types of problems [12–16]. Shor's algorithm, a quantum algorithm, can efficiently factor large numbers and compute discrete logarithms, tasks that form the basis of RSA and ECC security [3, 10, 12]. This capability undermines the security assumptions of these widely deployed public-key cryptosystems, rendering them vulnerable to attacks once sufficiently powerful quantum computers become a reality [3, 17].

While estimates vary, experts suggest that a fault-tolerant quantum computer capable of executing Shor's algorithm could be operational within the next two decades [17, 18]. This looming threat, often referred to as the quantum apocalypse, has prompted a global effort to develop and deploy quantum-resistant cryptographic solutions [2, 3, 5].

The harvest now, decrypt later paradigm underscores the urgency of this transition [17]. Malicious actors could exploit the longevity of encrypted data by capturing sensitive information today and decrypting it later, once they have access to quantum computers. This necessitates a proactive approach to ensure the long-term confidentiality of sensitive information, particularly in sectors like finance, government, and critical infrastructure, where data often retains its value for extended periods [17, 18].

Post-quantum cryptography (PQC) encompasses classical cryptographic techniques that are believed to be resistant to attacks from both classical and quantum computers [5, 19]. PQC algorithms are based on mathematical problems that are not known to be efficiently solvable by quantum algorithms, offering a potential solution to the threat posed by quantum computers [19, 20].

There are seven major families of PQC algorithms, each relying on a different hard mathematical problem:

Integrating PQC into operational technology (OT) environments presents unique challenges compared to traditional IT systems. OT systems, responsible for controlling and monitoring physical processes in CI, often have long lifespans, limited computational resources, and stringent real-time requirements [17, 34, 35]. These constraints necessitate careful consideration when selecting and deploying PQC algorithms.

The extended lifespan of OT equipment, often exceeding 20 years, poses a significant challenge for PQC migration [17, 34]. As quantum computing technology advances, PQC algorithms deemed secure today might become vulnerable in the future. This necessitates flexible and adaptable solutions that allow for future upgrades and algorithm agility [36, 37]. Hybrid cryptography, combining classical and PQC algorithms during the migration period, offers a viable approach to address this challenge, ensuring continued security even if one of the algorithms is compromised [36].

Furthermore, the computational limitations and real-time constraints of OT systems require PQC algorithms with low latency and minimal overhead [2, 29, 35]. Lattice-based cryptography, with its relatively small key sizes and efficient implementations, has emerged as a frontrunner for OT deployments [29, 35]. However, ongoing research and development are crucial to optimize these algorithms further and address potential vulnerabilities, such as side-channel attacks [35, 38].

Side-channel attacks exploit information leakage from physical implementations of cryptographic systems, such as power consumption, electromagnetic emissions, or timing variations [39]. While these attacks can threaten both classical and PQC implementations, they are particularly relevant in OT environments, where attackers might have physical access to devices [38, 39]. Robust countermeasures, including hardware and software defenses, are essential to mitigate the risk of side-channel attacks on PQC implementations in CI [38].

The advent of quantum computing presents a significant challenge to the long-term security of critical infrastructure. The potential for quantum computers to break widely used cryptographic algorithms necessitates a proactive and strategic approach to cybersecurity in CI.

Post-quantum cryptography offers a promising path to securing these vital systems against quantum threats. However, the unique constraints of OT environments require careful consideration when selecting and deploying PQC algorithms. Lattice-based cryptography, with its favorable performance characteristics and active research community, appears well-suited for CI applications. However, ongoing research and development are crucial to address potential vulnerabilities and ensure long-term security in the face of evolving quantum threats.

The transition to PQC in CI is not merely a technical challenge but a multifaceted endeavor requiring collaboration between governments, industry stakeholders, and the research community. Standardizing PQC algorithms, developing secure and efficient implementations, and addressing potential vulnerabilities like side-channel attacks are all crucial steps toward achieving quantum-resistant security for CI.

The time to act is now. By investing in PQC research, development, and deployment, we can ensure the resilience of critical infrastructure and safeguard the essential services that underpin modern society in the post-quantum era.

More:
The Urgent Need for Post-Quantum Cryptography in Critical Infrastructure | by Cyber Safe Institute | Jul, 2024 - Medium

Read More..